Dec 02 16:42:48 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 02 16:42:48 crc restorecon[4688]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 02 16:42:48 crc restorecon[4688]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc 
restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 16:42:48 crc 
restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:48 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 
16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc 
restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 16:42:49 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 02 16:42:49 crc restorecon[4688]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 02 16:42:49 crc kubenswrapper[4747]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 16:42:49 crc kubenswrapper[4747]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 02 16:42:49 crc kubenswrapper[4747]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 16:42:49 crc kubenswrapper[4747]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 16:42:49 crc kubenswrapper[4747]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 02 16:42:49 crc kubenswrapper[4747]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.596012    4747 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599772    4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599797    4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599804    4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599809    4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599813    4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599818    4747 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599828    4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599833    4747 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599838    4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599841    4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599846    4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599850    4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599856    4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599862    4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599867    4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599872    4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599878    4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599883    4747 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599889    4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599897    4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599918    4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599924    4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599932    4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599937    4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599942    4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599947    4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599952    4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599956    4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599961    4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599967    4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599973    4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599979    4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599984    4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599991    4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.599996    4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600001    4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600005    4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600010    4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600014    4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600019    4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600024    4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600028    4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600033    4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600037    4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600041    4747 feature_gate.go:330] unrecognized feature gate: Example
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600046    4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202
16:42:49.600051 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600056 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600061 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600065 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600072 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600078 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600082 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600087 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600092 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600096 4747 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600101 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600106 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600111 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600115 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600120 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600124 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600129 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600135 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600141 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600146 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600152 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600158 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600162 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600167 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.600171 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600466 4747 flags.go:64] FLAG: --address="0.0.0.0" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600482 4747 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600496 4747 flags.go:64] FLAG: --anonymous-auth="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600507 4747 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600516 4747 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600521 4747 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600530 4747 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600537 4747 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600542 4747 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600548 4747 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600554 4747 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600560 4747 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600565 4747 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600570 4747 flags.go:64] FLAG: --cgroup-root="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600575 4747 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600579 4747 flags.go:64] FLAG: --client-ca-file="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600584 4747 flags.go:64] FLAG: --cloud-config="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600589 4747 flags.go:64] FLAG: --cloud-provider="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600593 4747 flags.go:64] FLAG: --cluster-dns="[]" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600602 4747 flags.go:64] FLAG: --cluster-domain="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600607 4747 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600613 4747 flags.go:64] FLAG: --config-dir="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600618 4747 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600631 4747 flags.go:64] FLAG: --container-log-max-files="5" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600639 4747 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 02 16:42:49 crc 
kubenswrapper[4747]: I1202 16:42:49.600644 4747 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600650 4747 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600656 4747 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600662 4747 flags.go:64] FLAG: --contention-profiling="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600667 4747 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600673 4747 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600678 4747 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600684 4747 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600690 4747 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600695 4747 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600700 4747 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600707 4747 flags.go:64] FLAG: --enable-load-reader="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600712 4747 flags.go:64] FLAG: --enable-server="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600717 4747 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600724 4747 flags.go:64] FLAG: --event-burst="100" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600730 4747 flags.go:64] FLAG: --event-qps="50" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600735 4747 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600740 4747 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600749 4747 flags.go:64] FLAG: --eviction-hard="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600755 4747 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600760 4747 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600765 4747 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600770 4747 flags.go:64] FLAG: --eviction-soft="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600775 4747 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600780 4747 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600785 4747 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600790 4747 flags.go:64] FLAG: --experimental-mounter-path="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600795 4747 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600799 4747 flags.go:64] FLAG: --fail-swap-on="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600804 4747 flags.go:64] FLAG: --feature-gates="" Dec 02 
16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600810 4747 flags.go:64] FLAG: --file-check-frequency="20s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600815 4747 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600820 4747 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600824 4747 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600830 4747 flags.go:64] FLAG: --healthz-port="10248" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600835 4747 flags.go:64] FLAG: --help="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600840 4747 flags.go:64] FLAG: --hostname-override="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600846 4747 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600851 4747 flags.go:64] FLAG: --http-check-frequency="20s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600856 4747 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600862 4747 flags.go:64] FLAG: --image-credential-provider-config="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600867 4747 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600872 4747 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600878 4747 flags.go:64] FLAG: --image-service-endpoint="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600883 4747 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600888 4747 flags.go:64] FLAG: --kube-api-burst="100" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600893 4747 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600899 4747 flags.go:64] FLAG: --kube-api-qps="50" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600927 4747 flags.go:64] FLAG: --kube-reserved="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600933 4747 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600942 4747 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600948 4747 flags.go:64] FLAG: --kubelet-cgroups="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600953 4747 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600960 4747 flags.go:64] FLAG: --lock-file="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600965 4747 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600970 4747 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600976 4747 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600984 4747 flags.go:64] FLAG: --log-json-split-stream="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600990 4747 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.600995 4747 flags.go:64] FLAG: --log-text-split-stream="false" Dec 02 16:42:49 crc 
kubenswrapper[4747]: I1202 16:42:49.601000 4747 flags.go:64] FLAG: --logging-format="text" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601006 4747 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601012 4747 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601017 4747 flags.go:64] FLAG: --manifest-url="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601022 4747 flags.go:64] FLAG: --manifest-url-header="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601029 4747 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601035 4747 flags.go:64] FLAG: --max-open-files="1000000" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601042 4747 flags.go:64] FLAG: --max-pods="110" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601047 4747 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601053 4747 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601059 4747 flags.go:64] FLAG: --memory-manager-policy="None" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601064 4747 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601069 4747 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601074 4747 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601079 4747 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601092 4747 flags.go:64] FLAG: --node-status-max-images="50" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601097 4747 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601101 4747 flags.go:64] FLAG: --oom-score-adj="-999" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601106 4747 flags.go:64] FLAG: --pod-cidr="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601112 4747 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601121 4747 flags.go:64] FLAG: --pod-manifest-path="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601126 4747 flags.go:64] FLAG: --pod-max-pids="-1" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601132 4747 flags.go:64] FLAG: --pods-per-core="0" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601139 4747 flags.go:64] FLAG: --port="10250" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601145 4747 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601150 4747 flags.go:64] FLAG: --provider-id="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601155 4747 flags.go:64] FLAG: --qos-reserved="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601161 4747 flags.go:64] FLAG: --read-only-port="10255" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601165 4747 flags.go:64] FLAG: --register-node="true" Dec 02 16:42:49 crc 
kubenswrapper[4747]: I1202 16:42:49.601171 4747 flags.go:64] FLAG: --register-schedulable="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601175 4747 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601185 4747 flags.go:64] FLAG: --registry-burst="10" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601190 4747 flags.go:64] FLAG: --registry-qps="5" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601195 4747 flags.go:64] FLAG: --reserved-cpus="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601201 4747 flags.go:64] FLAG: --reserved-memory="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601208 4747 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601214 4747 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601220 4747 flags.go:64] FLAG: --rotate-certificates="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601225 4747 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601231 4747 flags.go:64] FLAG: --runonce="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601236 4747 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601241 4747 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601246 4747 flags.go:64] FLAG: --seccomp-default="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601251 4747 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601256 4747 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601261 4747 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601267 4747 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601274 4747 flags.go:64] FLAG: --storage-driver-password="root" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601279 4747 flags.go:64] FLAG: --storage-driver-secure="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601285 4747 flags.go:64] FLAG: --storage-driver-table="stats" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601292 4747 flags.go:64] FLAG: --storage-driver-user="root" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601298 4747 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601305 4747 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601311 4747 flags.go:64] FLAG: --system-cgroups="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601316 4747 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601327 4747 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601333 4747 flags.go:64] FLAG: --tls-cert-file="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601338 4747 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601346 4747 flags.go:64] FLAG: --tls-min-version="" Dec 02 16:42:49 
crc kubenswrapper[4747]: I1202 16:42:49.601351 4747 flags.go:64] FLAG: --tls-private-key-file="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601356 4747 flags.go:64] FLAG: --topology-manager-policy="none" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601361 4747 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601366 4747 flags.go:64] FLAG: --topology-manager-scope="container" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601371 4747 flags.go:64] FLAG: --v="2" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601379 4747 flags.go:64] FLAG: --version="false" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601386 4747 flags.go:64] FLAG: --vmodule="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601393 4747 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601399 4747 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601586 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601594 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601599 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601604 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601609 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601612 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601617 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601621 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601624 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601629 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601633 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601637 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601641 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601645 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601657 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601661 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601666 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601670 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 
16:42:49.601675 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601681 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601686 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601690 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601695 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601699 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601705 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601711 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601717 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601722 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601726 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601731 4747 feature_gate.go:330] unrecognized feature gate: Example Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601742 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601747 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601752 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601756 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601761 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601765 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601770 4747 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601775 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601780 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601786 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601791 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601795 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601800 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601805 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601810 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601815 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601819 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601823 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601827 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601831 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601838 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601843 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601848 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601852 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601856 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601860 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601865 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601869 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601873 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601878 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601882 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601886 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601893 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601897 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601921 4747 feature_gate.go:330] unrecognized 
feature gate: SetEIPForNLBIngressController Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601928 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601933 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601938 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601942 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601947 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.601951 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.601958 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.616862 4747 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.616935 4747 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617043 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617056 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617062 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617067 4747 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617073 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617078 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617083 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617089 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617094 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617099 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617104 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617108 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617113 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 
16:42:49.617117 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617122 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617127 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617132 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617137 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617141 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617146 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617151 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617156 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617161 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617165 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617170 4747 feature_gate.go:330] unrecognized feature gate: Example Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617179 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617186 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617192 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617199 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617204 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617210 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617216 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617223 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617250 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617258 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617263 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617268 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617273 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617278 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617283 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617288 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617293 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617297 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617302 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617307 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617311 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617316 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617321 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617325 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617330 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617335 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617339 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617344 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617349 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617355 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617359 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617364 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617368 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617372 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617379 4747 
feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617384 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617390 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617396 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617401 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617405 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617410 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617414 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617418 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617422 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617426 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617431 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.617440 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617586 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617595 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617600 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617607 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617613 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617618 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617623 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617628 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617633 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617638 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617641 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617646 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617650 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617654 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617658 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617665 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617671 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617676 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617682 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617689 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617694 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617699 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617704 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617708 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617713 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617717 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617722 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617726 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617730 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617735 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617740 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617744 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617748 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617752 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617756 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617761 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617766 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617780 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617784 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617789 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617794 4747 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617799 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617803 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617807 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617810 4747 feature_gate.go:330] unrecognized feature gate: Example Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617815 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617819 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617824 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617829 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617833 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617837 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617841 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617846 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617850 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617854 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617858 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617862 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617866 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617870 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617874 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617878 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617883 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617887 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 16:42:49 crc 
kubenswrapper[4747]: W1202 16:42:49.617891 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617895 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617899 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617924 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617928 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617932 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617936 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.617940 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.617948 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.618432 4747 server.go:940] "Client rotation is on, will bootstrap in background" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.621144 4747 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.621242 4747 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.621696 4747 server.go:997] "Starting client certificate rotation"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.621717 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.621977 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-22 12:04:27.255765021 +0000 UTC
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.622061 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.626965 4747 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.629246 4747 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.629727 4747 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.638767 4747 log.go:25] "Validated CRI v1 runtime API"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.659371 4747 log.go:25] "Validated CRI v1 image API"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.662078 4747 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.664760 4747 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-02-16-36-30-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.664812 4747 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.687621 4747 manager.go:217] Machine: {Timestamp:2025-12-02 16:42:49.685866148 +0000 UTC m=+0.212754947 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:e9e860e5-c32f-4d91-a884-f294326f5bb2 BootID:a121f387-7df9-4ca6-bc20-7c686c9d2626 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:0c:1e:1f Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:0c:1e:1f Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:52:ab:d1 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:c8:95:c0 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:68:2b:e4 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:7c:4b:77 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:12:29:00:49:d1:e4 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:6a:38:24:a2:10:0f Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.687964 4747 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.688166 4747 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.688924 4747 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.689234 4747 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.689285 4747 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.689587 4747 topology_manager.go:138] "Creating topology manager with none policy"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.689603 4747 container_manager_linux.go:303] "Creating device plugin manager"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.689891 4747 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.689953 4747 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.690429 4747 state_mem.go:36] "Initialized new in-memory state store"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.690650 4747 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.691467 4747 kubelet.go:418] "Attempting to sync node with API server"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.691497 4747 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.691531 4747 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.691551 4747 kubelet.go:324] "Adding apiserver pod source"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.691607 4747 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.693602 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.693724 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.694266 4747 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.694334 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.694438 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.694669 4747 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.695437 4747 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.695948 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.695976 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.695985 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.695993 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696005 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696015 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696023 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696033 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696042 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696050 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696078 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696085 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696296 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.696822 4747 server.go:1280] "Started kubelet"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.697148 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.697159 4747 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.697167 4747 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.698425 4747 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 02 16:42:49 crc systemd[1]: Started Kubernetes Kubelet.
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.700017 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.700331 4747 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.700348 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 18:29:46.721394652 +0000 UTC
Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.700524 4747 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.700597 4747 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.700606 4747 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.699359 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d73a2401f46ad default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 16:42:49.696790189 +0000 UTC m=+0.223678958,LastTimestamp:2025-12-02 16:42:49.696790189 +0000 UTC m=+0.223678958,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.700669 4747 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.704432 4747 server.go:460] "Adding debug handlers to kubelet server"
Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.705623 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused
Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.705711 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError"
Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.705817 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="200ms"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.706386 4747 factory.go:55] Registering systemd factory
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.706421 4747 factory.go:221] Registration of the systemd container factory successfully
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.708882 4747 factory.go:153] Registering CRI-O factory
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.708950 4747 factory.go:221] Registration of the crio container factory successfully
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.709017 4747 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.709046 4747 factory.go:103] Registering Raw factory
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.709067 4747 manager.go:1196] Started watching for new ooms in manager
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.709651 4747 manager.go:319] Starting recovery of all containers
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713760 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713827 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713841 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713854 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713866 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713877 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713887 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713897 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713921 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713933 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713944 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713954 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713965 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713978 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713989 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.713999 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714010 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714020 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714029 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714042 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714088 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714099 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714108 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714118 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714128 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714140 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714152 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714164 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714174 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714183 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714192 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714202 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714210 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714219 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714228 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714237 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714246 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714255 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714265 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714274 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714284 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714292 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714302 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714311 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714320 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714330 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714337 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714346 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714354 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714362 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714371 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714381 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714393 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714404 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714416 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714426 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714436 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714445 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714454 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714462 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714471 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714514 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714528 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714537 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714546 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714555 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714563 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714573 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714582 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714592 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714602 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714612 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714629 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714647 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714665 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714682 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714692 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714705 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714714 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714724 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714734 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714743 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714754 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714765 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714774 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714784 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714794 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714802 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714811 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714820 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714829 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714837 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714848 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714857 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714866 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714875 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714884 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714894 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714920 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714930 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714939 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714947 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714957 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714966 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714980 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.714990 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715000 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715019 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715039 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715055 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715067 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715080 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715092 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715103 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715115 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715125 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715138 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715150 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715163 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715175 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715228 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715238 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715246 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715254 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715263 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715271 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715280 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715289 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715298 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715307 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715316 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715325 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715333 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715341 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715350 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715384 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715393 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715402 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715412 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715421 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715431 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715441 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715452 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715461 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715470 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715479 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715488 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715498 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715506 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715515 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715524 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715534 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715543 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715554 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715563 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715573 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715584 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715593 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715604 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715616 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715627 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715639 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715657 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715679 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715700 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715720 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715738 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715750 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715761 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715773 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715856 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715869 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715880 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715893 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.715930 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716454 4747 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716482 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716496 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716506 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716515 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716525 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716538 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716547 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716557 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716567 4747 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716576 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716585 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716596 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716610 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716634 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716650 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716665 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716678 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716690 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716710 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716725 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716734 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716746 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716755 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716765 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716775 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716785 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716795 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716806 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716816 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716827 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716836 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716846 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716855 4747 reconstruct.go:97] "Volume reconstruction finished" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.716863 4747 reconciler.go:26] "Reconciler: start to sync state" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.723749 4747 manager.go:324] Recovery completed Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.732668 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.736796 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.736851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.736862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.738971 4747 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.739002 4747 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.739038 4747 state_mem.go:36] "Initialized new in-memory state store" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.750869 4747 policy_none.go:49] "None policy: Start" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.752205 4747 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.752245 4747 state_mem.go:35] "Initializing new in-memory state store" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.755890 4747 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.758798 4747 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.759192 4747 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.759228 4747 kubelet.go:2335] "Starting kubelet main sync loop" Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.759275 4747 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 02 16:42:49 crc kubenswrapper[4747]: W1202 16:42:49.760326 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.760385 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.801184 4747 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.804282 4747 manager.go:334] "Starting Device Plugin manager" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.804469 4747 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.804489 4747 server.go:79] "Starting device plugin registration server" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.805029 4747 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.805051 4747 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.805406 4747 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.805900 4747 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.806069 4747 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.813039 4747 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.860419 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.860554 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.862219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.862255 4747 
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.860419 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"]
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.860554 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.862219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.862255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.862264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.862429 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863032 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863152 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863394 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863569 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863636 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863944 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863964 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.863974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864675 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864685 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.864702 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.866658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.866675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.866700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.866852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.866879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.866889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.866988 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.867244 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.867298 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.868887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.868941 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.868954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.869022 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.869064 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.869076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.869215 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.869271 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.869982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.870015 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.870026 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.905797 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.906475 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="400ms" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.906834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.906862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.906871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.906890 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 16:42:49 crc kubenswrapper[4747]: E1202 16:42:49.907237 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919590 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919637 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919662 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919680 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod 
\"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919697 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919717 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919746 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919776 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919812 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919847 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919870 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919891 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919927 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919952 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 16:42:49 crc kubenswrapper[4747]: I1202 16:42:49.919993 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.020844 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.020928 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.020943 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.020958 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.020973 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.020996 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021011 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021024 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021052 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021070 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021177 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021181 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021234 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021274 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021247 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021188 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021300 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 
16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021279 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021261 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021253 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021362 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021324 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021406 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021432 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021496 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021544 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 
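
[editor's note] Note the out-of-order microsecond timestamps in this interleaved "MountVolume started" / "MountVolume.SetUp succeeded" burst (for example .021274 is logged before .021247): the kubelet's operation executor runs each volume operation in its own goroutine, so completion order need not match start order. A minimal sketch of that fan-out pattern; volume names are illustrative:

    package main

    import (
        "fmt"
        "sync"
    )

    func main() {
        // One goroutine per volume operation, as the interleaved log
        // ordering above suggests.
        volumes := []string{"resource-dir", "cert-dir", "audit-dir", "data-dir", "log-dir"}
        var wg sync.WaitGroup
        for _, v := range volumes {
            wg.Add(1)
            go func(name string) {
                defer wg.Done()
                fmt.Println("MountVolume.SetUp succeeded for volume", name)
            }(v)
        }
        wg.Wait() // completion order is not start order
    }
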
16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021580 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021639 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.021792 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.107686 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.113832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.113923 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.113937 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.113970 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 16:42:50 crc kubenswrapper[4747]: E1202 16:42:50.114659 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.184536 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.207665 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: W1202 16:42:50.210976 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-8dcb5c04f7e5f6d9c76d8fe1a8c989d5230be87b16a3a688a1581f9695ed3c7d WatchSource:0}: Error finding container 8dcb5c04f7e5f6d9c76d8fe1a8c989d5230be87b16a3a688a1581f9695ed3c7d: Status 404 returned error can't find the container with id 8dcb5c04f7e5f6d9c76d8fe1a8c989d5230be87b16a3a688a1581f9695ed3c7d Dec 02 16:42:50 crc kubenswrapper[4747]: W1202 16:42:50.224672 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-6468a943691cd4598221f47c36c7d0145a1e44849ef314acac3a72f120699719 WatchSource:0}: Error finding container 6468a943691cd4598221f47c36c7d0145a1e44849ef314acac3a72f120699719: Status 404 returned error can't find the container with id 6468a943691cd4598221f47c36c7d0145a1e44849ef314acac3a72f120699719 Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.227957 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.233931 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.239622 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 16:42:50 crc kubenswrapper[4747]: W1202 16:42:50.244109 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-13259f2e0a0feaf7e0e95bb347376eada8fed97a6cea74ce296159fbb26e63e4 WatchSource:0}: Error finding container 13259f2e0a0feaf7e0e95bb347376eada8fed97a6cea74ce296159fbb26e63e4: Status 404 returned error can't find the container with id 13259f2e0a0feaf7e0e95bb347376eada8fed97a6cea74ce296159fbb26e63e4 Dec 02 16:42:50 crc kubenswrapper[4747]: W1202 16:42:50.250008 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-a55211eb52f9438b3e35b0503a0b8f4494bcd4af97cd764cb225b5ee2b3c5f48 WatchSource:0}: Error finding container a55211eb52f9438b3e35b0503a0b8f4494bcd4af97cd764cb225b5ee2b3c5f48: Status 404 returned error can't find the container with id a55211eb52f9438b3e35b0503a0b8f4494bcd4af97cd764cb225b5ee2b3c5f48 Dec 02 16:42:50 crc kubenswrapper[4747]: W1202 16:42:50.254507 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-42fead59e267c75a4b15a5b0e6c1a5fe42edb875886232bfed0b81f74d1bf29c WatchSource:0}: Error finding container 42fead59e267c75a4b15a5b0e6c1a5fe42edb875886232bfed0b81f74d1bf29c: Status 404 returned error can't find the container with id 42fead59e267c75a4b15a5b0e6c1a5fe42edb875886232bfed0b81f74d1bf29c Dec 02 16:42:50 crc kubenswrapper[4747]: E1202 16:42:50.307613 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="800ms" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.515427 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.517156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.517212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.517224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.517257 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 16:42:50 crc kubenswrapper[4747]: E1202 16:42:50.517846 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc" Dec 02 16:42:50 crc kubenswrapper[4747]: W1202 16:42:50.582408 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 02 16:42:50 crc kubenswrapper[4747]: E1202 16:42:50.582498 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.698046 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.701571 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 09:57:16.306134396 +0000 UTC Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.701640 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 17h14m25.604497369s for next certificate rotation Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.766003 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.766172 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6468a943691cd4598221f47c36c7d0145a1e44849ef314acac3a72f120699719"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.767479 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75" exitCode=0 Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.767544 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.767566 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8dcb5c04f7e5f6d9c76d8fe1a8c989d5230be87b16a3a688a1581f9695ed3c7d"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.767687 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.768989 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769026 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769050 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769036 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2" exitCode=0 Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769190 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"42fead59e267c75a4b15a5b0e6c1a5fe42edb875886232bfed0b81f74d1bf29c"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769266 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.769867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.770999 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.771749 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.771779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.771790 4747 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.771937 4747 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="8b3e5f53870895e7908e862ed8bd59c4019ae3a0c8057418b3295104b982e62c" exitCode=0 Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.772001 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"8b3e5f53870895e7908e862ed8bd59c4019ae3a0c8057418b3295104b982e62c"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.772021 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a55211eb52f9438b3e35b0503a0b8f4494bcd4af97cd764cb225b5ee2b3c5f48"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.772081 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.773009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.773047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.773058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.773840 4747 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385" exitCode=0 Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.773867 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.773886 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"13259f2e0a0feaf7e0e95bb347376eada8fed97a6cea74ce296159fbb26e63e4"} Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.773990 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.774713 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.774748 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:50 crc kubenswrapper[4747]: I1202 16:42:50.774759 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:51 crc kubenswrapper[4747]: E1202 16:42:51.110186 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 
38.102.83.201:6443: connect: connection refused" interval="1.6s" Dec 02 16:42:51 crc kubenswrapper[4747]: W1202 16:42:51.186085 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 02 16:42:51 crc kubenswrapper[4747]: E1202 16:42:51.186221 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 02 16:42:51 crc kubenswrapper[4747]: W1202 16:42:51.235812 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 02 16:42:51 crc kubenswrapper[4747]: E1202 16:42:51.235957 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 02 16:42:51 crc kubenswrapper[4747]: W1202 16:42:51.259315 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.201:6443: connect: connection refused Dec 02 16:42:51 crc kubenswrapper[4747]: E1202 16:42:51.259400 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.201:6443: connect: connection refused" logger="UnhandledError" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.318335 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.320636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.320695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.320706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.320737 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 16:42:51 crc kubenswrapper[4747]: E1202 16:42:51.321501 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.201:6443: connect: connection refused" node="crc" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.718593 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.781007 4747 
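
The "Failed to ensure lease exists, will retry" interval doubles across consecutive failures: 800ms earlier in the log, 1.6s here, and 3.2s later on, consistent with exponential backoff. A sketch reproducing the logged sequence (the kubelet's actual policy may cap or jitter differently):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Plain doubling reproduces the retry intervals seen in the log:
        // 800ms, 1.6s, 3.2s.
        interval := 800 * time.Millisecond
        for i := 0; i < 3; i++ {
            fmt.Println(interval)
            interval *= 2
        }
    }
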
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.781080 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.781099 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.781101 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.782060 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.782110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.782125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.785125 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.785182 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.785207 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.785230 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.787975 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5" exitCode=0 Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.788051 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.788301 4747 kubelet_node_status.go:401] "Setting node annotation to enable 
volume controller attach/detach" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.789518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.789605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.789670 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.793401 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"54b6412bd9889bd4052094a6a615f95516b2ad40be5b683c168627f98c9c180a"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.793574 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.798523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.798566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.798577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.800699 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.800740 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.800757 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4"} Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.800870 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.801616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.801649 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:51 crc kubenswrapper[4747]: I1202 16:42:51.801661 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.810022 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2" exitCode=0 Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.810158 4747 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2"} Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.810329 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.812139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.812208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.812233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.816697 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5"} Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.816760 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.816760 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.818630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.818701 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.818727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.819326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.819371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.819391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.922217 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.923699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.923751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.923767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:52 crc kubenswrapper[4747]: I1202 16:42:52.923808 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823242 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e"} Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823324 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672"} Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823342 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823345 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf"} Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823463 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602"} Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823480 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016"} Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823267 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.823516 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.829259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.829319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.829349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.829375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.829386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:53 crc kubenswrapper[4747]: I1202 16:42:53.829416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.625369 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.625612 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.627148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.627212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 
16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.627228 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.760577 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.825719 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.825806 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.825823 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.827223 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.827261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.827273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.827549 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.827607 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:54 crc kubenswrapper[4747]: I1202 16:42:54.827632 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.468615 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.564077 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.564368 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.566086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.566157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.566180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.828624 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.828703 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.829994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.830059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:42:55 crc kubenswrapper[4747]: I1202 16:42:55.830084 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.605418 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.605691 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.607056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.607109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.607121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.763871 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.764108 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.765338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.765408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:57 crc kubenswrapper[4747]: I1202 16:42:57.765420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:58 crc kubenswrapper[4747]: I1202 16:42:58.041308 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:42:58 crc kubenswrapper[4747]: I1202 16:42:58.041496 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:42:58 crc kubenswrapper[4747]: I1202 16:42:58.042871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:42:58 crc kubenswrapper[4747]: I1202 16:42:58.043026 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:42:58 crc kubenswrapper[4747]: I1202 16:42:58.043055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:42:59 crc kubenswrapper[4747]: E1202 16:42:59.813821 4747 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.048056 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.048305 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.050085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:00 crc 
kubenswrapper[4747]: I1202 16:43:00.050155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.050170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.054026 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.383170 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.390146 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.763964 4747 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.764093 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.846706 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.848313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.848399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:00 crc kubenswrapper[4747]: I1202 16:43:00.848444 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.696277 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.696699 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.697897 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.697967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.697977 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.698825 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout 
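
Two probe-failure modes appear around here: the cluster-policy-controller startup probe above times out awaiting headers ("Client.Timeout exceeded"), and the kube-apiserver startup probe just below gets HTTP 403 from /livez. Kubelet HTTP probes count only status codes in [200, 400) as success, so both register as failures. A minimal sketch of that classification, reusing the healthz endpoint from the log and assuming a self-signed serving cert:

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        // Roughly what an HTTP startup probe does: GET the endpoint with a
        // deadline, then classify. A slow handler surfaces as a client
        // timeout; a 403 is a failure because only 2xx/3xx count as success.
        client := &http.Client{
            Timeout: 5 * time.Second,
            Transport: &http.Transport{
                // The probed endpoints here use self-signed certificates.
                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
            },
        }
        resp, err := client.Get("https://192.168.126.11:10357/healthz")
        if err != nil {
            fmt.Println("probe failure:", err)
            return
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            fmt.Println("probe success:", resp.StatusCode)
        } else {
            fmt.Println("probe failure: HTTP", resp.StatusCode)
        }
    }
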
Dec 02 16:43:01 crc kubenswrapper[4747]: E1202 16:43:01.720360 4747 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.849350 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.850854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.850924 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:01 crc kubenswrapper[4747]: I1202 16:43:01.850937 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:02 crc kubenswrapper[4747]: I1202 16:43:02.051853 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 16:43:02 crc kubenswrapper[4747]: I1202 16:43:02.051955 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 16:43:02 crc kubenswrapper[4747]: I1202 16:43:02.057983 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 16:43:02 crc kubenswrapper[4747]: I1202 16:43:02.058082 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.766732 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.766960 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.767447 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.767518 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.768092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.768150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.768167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.771126 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.856612 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.857166 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.857259 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.857533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.857570 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:04 crc kubenswrapper[4747]: I1202 16:43:04.857582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:05 crc kubenswrapper[4747]: I1202 16:43:05.578275 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 16:43:05 crc kubenswrapper[4747]: I1202 16:43:05.578411 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 16:43:05 crc kubenswrapper[4747]: I1202 16:43:05.762527 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 16:43:05 crc kubenswrapper[4747]: I1202 16:43:05.776895 4747 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.051613 4747 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.054527 4747 trace.go:236] Trace[1704812091]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 16:42:53.810) (total time: 13244ms): Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[1704812091]: ---"Objects listed" error: 13244ms (16:43:07.054) Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[1704812091]: [13.244303996s] [13.244303996s] END Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.054566 4747 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.054733 4747 trace.go:236] Trace[1748155990]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 16:42:53.383) (total time: 13670ms): Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[1748155990]: ---"Objects listed" error: 13670ms (16:43:07.054) Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[1748155990]: [13.670714543s] [13.670714543s] END Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.054758 4747 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.055787 4747 trace.go:236] Trace[2091120489]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 16:42:53.474) (total time: 13581ms): Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[2091120489]: ---"Objects listed" error: 13581ms (16:43:07.055) Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[2091120489]: [13.581511545s] [13.581511545s] END Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.055815 4747 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.055862 4747 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.056170 4747 trace.go:236] Trace[123410110]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 16:42:53.326) (total time: 13729ms): Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[123410110]: ---"Objects listed" error: 13729ms (16:43:07.056) Dec 02 16:43:07 crc kubenswrapper[4747]: Trace[123410110]: [13.729476s] [13.729476s] END Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.056196 4747 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.058272 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.702537 4747 apiserver.go:52] "Watching apiserver" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.706300 4747 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.706688 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.707162 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.707187 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.707329 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.707376 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.707518 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.707566 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.707640 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.707695 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.707803 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.708452 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.708811 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.709382 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.710008 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.710038 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.710088 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.710502 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.710783 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.711235 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.737197 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.752431 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.766559 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.768640 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.771949 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.780431 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.780788 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.798533 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.802497 4747 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.811326 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.821584 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.829766 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.837408 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.847133 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.856534 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\
"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859521 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859562 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859584 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859601 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859619 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859635 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859670 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859696 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859713 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859729 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859765 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859785 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859803 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859818 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859833 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859850 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859868 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859952 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859970 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859984 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.859999 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860006 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860015 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860071 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860119 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860139 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860158 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860175 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860193 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860210 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860227 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860244 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" 
(UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860260 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860277 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860293 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860316 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860334 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860363 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860393 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860412 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860431 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860451 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860471 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860498 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860517 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860536 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860554 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860574 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860591 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860608 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860626 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860644 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860660 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860678 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860695 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860711 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860731 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860748 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860763 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860781 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860798 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860817 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860840 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860857 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860874 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860892 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860931 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860948 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.860969 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861003 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861035 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 
16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861061 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861066 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861087 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861111 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861135 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861159 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861183 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861280 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861310 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861333 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861354 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861377 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861403 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861429 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861457 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861481 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861507 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861532 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861555 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861578 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861596 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861612 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861632 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861649 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861666 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861687 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861704 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861722 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861739 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861756 4747 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861772 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861790 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861808 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861827 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861863 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861928 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861954 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.861978 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862000 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 16:43:07 crc 
kubenswrapper[4747]: I1202 16:43:07.862021 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862043 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862065 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862091 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862115 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862140 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862162 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862182 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862208 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862230 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 16:43:07 crc 
kubenswrapper[4747]: I1202 16:43:07.862249 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862256 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862308 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862338 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862429 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862457 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862484 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862514 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862540 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862568 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod 
\"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862591 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862614 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862641 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862665 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862688 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862727 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862750 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862773 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862796 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862819 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862843 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862872 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862894 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862937 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863017 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863041 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863064 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863086 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863107 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863133 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: 
\"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863155 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863179 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863203 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863225 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863248 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863269 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863292 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863317 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863344 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863367 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863391 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863414 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863441 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863466 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863568 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863595 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863617 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863642 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863671 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863697 
4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863720 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863748 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863773 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863797 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863821 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863846 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863874 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863946 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863972 4747 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863997 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864022 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864048 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864076 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864146 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864171 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864197 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864221 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864245 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864269 
4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864290 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864312 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864338 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864366 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864392 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864416 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864441 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864488 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864539 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864587 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864608 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864633 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864664 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864700 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864725 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864754 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864775 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864793 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864813 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864834 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864886 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864900 4747 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864933 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.868707 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862266 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862687 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862741 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862762 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862440 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863168 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.862958 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.870119 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.870887 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.871072 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.871081 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863719 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864013 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864071 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864338 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864383 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.864864 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.865146 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.865428 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.863675 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.865569 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.865660 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.865874 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.865942 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.866015 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.866115 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.871350 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.871494 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.871533 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.871617 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.872028 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.872283 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.872280 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.866352 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.866605 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.866622 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.866780 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867074 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867164 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867345 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867477 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867532 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867696 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867691 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867719 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867777 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867976 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.869282 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.867600 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.872667 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.869625 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.869754 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.865828 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.873235 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.873337 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.873766 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875088 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875208 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875215 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875316 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875303 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875419 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875514 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875681 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875850 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.875851 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.876064 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.876227 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.876799 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.866261 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). 
InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.876836 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877049 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877071 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877262 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877438 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877475 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877666 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877721 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.877685 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.878016 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.878244 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.878400 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.878514 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:43:08.378493673 +0000 UTC m=+18.905382422 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.878534 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.878581 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.878658 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.878788 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.879031 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.879210 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.879378 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.879687 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.879701 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.879838 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.879985 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880134 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880308 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880340 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880380 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880385 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880547 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880760 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.880813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.881253 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.881260 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.881196 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.881381 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.881619 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.881710 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.881923 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.870892 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.882004 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.882246 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.882310 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.882392 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.882447 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:08.382431243 +0000 UTC m=+18.909319992 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.882598 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.882953 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883067 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883224 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883236 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883242 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883325 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883423 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883601 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883620 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.883690 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.883757 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:08.38374754 +0000 UTC m=+18.910636289 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883694 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.883886 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.884194 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.885138 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.885360 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.885432 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.882707 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.886658 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.889528 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.889612 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.889865 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.889988 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.890952 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.891045 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.891197 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.891224 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.891043 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.891422 4747 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.892223 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5" exitCode=255 Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.893002 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.893198 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.893332 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.893545 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5"} Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.895499 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.895844 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.896123 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.896152 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.896346 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.896383 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.896403 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.896532 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:08.396502076 +0000 UTC m=+18.923391005 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.897215 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.897614 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.898706 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.898757 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.900168 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.901185 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.901263 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.901800 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.902788 4747 scope.go:117] "RemoveContainer" containerID="93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.902980 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.903120 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.903168 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.903194 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:07 crc kubenswrapper[4747]: E1202 16:43:07.903281 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:08.403253094 +0000 UTC m=+18.930142033 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.904178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.909106 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.909138 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.909211 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.909324 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.909218 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.911782 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.912214 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.912731 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.912777 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.912895 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.913085 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.913459 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.913598 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.913634 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.913657 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.913696 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.913766 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.914211 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.914230 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.914307 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.914712 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.914727 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.915052 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.915129 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.915166 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.915421 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.915558 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.915710 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.916109 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.916735 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). 
InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.916743 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.916813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.916846 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917108 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917109 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917176 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917484 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917530 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917653 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917847 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.917856 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.918412 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.918842 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.922742 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.925008 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.945111 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.950496 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.957119 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.965670 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.965732 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.965827 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966242 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966726 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.965842 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966821 4747 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966834 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966845 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966858 4747 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966869 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966881 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966923 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966894 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.966937 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967021 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967044 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967061 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967098 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967115 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967128 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967220 4747 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967236 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967273 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967288 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967303 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967317 4747 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967352 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967369 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967384 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967421 4747 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967439 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967454 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967470 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967507 4747 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967523 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967537 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967550 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967585 4747 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967598 4747 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967612 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967625 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967659 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" 
DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967677 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967690 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967703 4747 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967716 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967752 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967767 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967781 4747 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967796 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967835 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967850 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967863 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967877 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967940 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc 
kubenswrapper[4747]: I1202 16:43:07.967954 4747 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967966 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.967979 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968018 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968066 4747 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968112 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968131 4747 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968144 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968182 4747 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968762 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968835 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968891 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.968979 4747 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" 
Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969043 4747 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969107 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969178 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969239 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969307 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969371 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969431 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969493 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969546 4747 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969599 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969649 4747 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969703 4747 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969763 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 
16:43:07.969823 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969879 4747 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.969984 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970065 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970123 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970182 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970236 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970293 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970351 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970432 4747 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970536 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970620 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970697 4747 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970768 4747 reconciler_common.go:293] "Volume detached for 
volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970844 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.970937 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971029 4747 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971088 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971143 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971196 4747 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971506 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971591 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971673 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971751 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971822 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.971891 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972005 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972107 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972184 4747 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972264 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972349 4747 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972426 4747 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972507 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972587 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972665 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972749 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972832 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972892 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.972975 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973069 4747 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973106 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973447 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973476 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973491 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973502 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: 
\"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973514 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973526 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973538 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973548 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973559 4747 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973568 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973579 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973589 4747 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973598 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973608 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973620 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973634 4747 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973647 4747 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973660 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973673 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973685 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973698 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973711 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973724 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973736 4747 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973750 4747 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973763 4747 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973778 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973797 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973809 4747 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973819 4747 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973832 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973845 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973855 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973864 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973874 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973885 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973895 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973922 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973932 4747 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973943 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973954 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973965 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973977 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973987 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.973998 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974008 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974018 4747 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974028 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974038 4747 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974050 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974060 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974069 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974081 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974091 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974102 4747 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974113 4747 reconciler_common.go:293] "Volume detached for 
volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974124 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974134 4747 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974144 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974154 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974165 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974176 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974185 4747 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974196 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974206 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974216 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974225 4747 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974238 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 
16:43:07.974250 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974262 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974284 4747 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974299 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.974309 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.986246 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:07 crc kubenswrapper[4747]: I1202 16:43:07.996847 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.012025 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.021977 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.023294 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.030752 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.033085 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 16:43:08 crc kubenswrapper[4747]: W1202 16:43:08.034760 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-aab4664fd15756d90886ef1a3774a74c310a554560041523c4f761e15e0cf279 WatchSource:0}: Error finding container aab4664fd15756d90886ef1a3774a74c310a554560041523c4f761e15e0cf279: Status 404 returned error can't find the container with id aab4664fd15756d90886ef1a3774a74c310a554560041523c4f761e15e0cf279 Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.037271 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 16:43:08 crc kubenswrapper[4747]: W1202 16:43:08.053000 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-ac969dfe467c2c94a7c609ca98b9257c21b31efbafb3586ea6aa29d56c2f6012 WatchSource:0}: Error finding container ac969dfe467c2c94a7c609ca98b9257c21b31efbafb3586ea6aa29d56c2f6012: Status 404 returned error can't find the container with id ac969dfe467c2c94a7c609ca98b9257c21b31efbafb3586ea6aa29d56c2f6012 Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.074799 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.478087 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.478182 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.478214 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.478243 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.478276 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478307 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:43:09.47827308 +0000 UTC m=+20.005161869 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478430 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478432 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478446 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478460 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478462 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478481 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478485 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478571 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478502 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:09.478492066 +0000 UTC m=+20.005380815 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478618 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:09.47860911 +0000 UTC m=+20.005497869 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478632 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:09.4786243 +0000 UTC m=+20.005513169 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: E1202 16:43:08.478643 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:09.478637691 +0000 UTC m=+20.005526450 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.896441 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c"} Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.896502 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263"} Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.896515 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ac969dfe467c2c94a7c609ca98b9257c21b31efbafb3586ea6aa29d56c2f6012"} Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.898368 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf"} Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.898415 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"aab4664fd15756d90886ef1a3774a74c310a554560041523c4f761e15e0cf279"} Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.900499 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.902030 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88"} Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.902419 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.903154 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"17d69b223dedca2bfd5e4c3bc4c42dda7700b05a02e496fd5893807c48136242"} Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.963941 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:08Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.980522 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:08Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:08 crc kubenswrapper[4747]: I1202 16:43:08.997571 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:08Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.012886 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.025778 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod 
was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.038877 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"n
ame\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.050784 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.065644 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.488141 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.488313 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488352 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:43:11.488317996 +0000 UTC m=+22.015206745 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.488414 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488453 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488561 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-02 16:43:11.488531902 +0000 UTC m=+22.015420691 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488580 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.488469 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488631 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:11.488619495 +0000 UTC m=+22.015508334 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.488657 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488768 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488784 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488796 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488823 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:11.48881541 +0000 UTC m=+22.015704249 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488875 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488887 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488895 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.488944 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:11.488936293 +0000 UTC m=+22.015825162 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.759952 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.760020 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.759952 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.760115 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.760225 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:43:09 crc kubenswrapper[4747]: E1202 16:43:09.760339 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.769169 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.769709 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.771041 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.771654 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.772615 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.773145 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.773703 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.774589 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.775251 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.776165 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.776655 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.777805 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f"
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.777890 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.778368 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.778891 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.779873 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.780423 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.781518 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.782007 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.782610 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.783631 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.784133 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.785383 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.785941 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.787220 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.787759 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.788727 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.790706 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.791618 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.794348 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.795115 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" 
path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.795779 4747 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.795950 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.796708 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.797798 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.798471 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.799024 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.800900 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.803333 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.804109 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.804884 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.805772 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.806420 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.807162 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.807885 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.808666 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.809282 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.812299 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.812920 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.814421 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.815012 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.815976 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.816480 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.817069 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.818231 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.818787 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.828170 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.848362 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.862655 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.875698 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.890125 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.907855 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.926118 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.940795 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.956086 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.969038 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.981691 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:09 crc kubenswrapper[4747]: I1202 16:43:09.995015 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.011889 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.026871 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.065384 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.092565 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.109138 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.125158 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.138617 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.157164 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.171410 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.185437 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.258621 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.260588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.260643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.260653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.260724 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.266060 4747 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.266154 4747 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.267124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.267152 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.267163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.267180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.267194 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: E1202 16:43:10.290026 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.299056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.299087 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.299095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.299111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.299123 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: E1202 16:43:10.309757 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.313161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.313203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.313214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.313229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.313239 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: E1202 16:43:10.323653 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.328855 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.328965 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.328979 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.329031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.329046 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: E1202 16:43:10.346356 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.351061 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.351112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.351125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.351144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.351156 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: E1202 16:43:10.363282 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:10 crc kubenswrapper[4747]: E1202 16:43:10.363411 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.365633 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.365685 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.365695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.365711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.365739 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.468472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.468512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.468520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.468534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.468544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.571596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.571675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.571695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.571722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.571743 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.674770 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.674878 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.674955 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.674990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.675026 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.777532 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.777612 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.777631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.777668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.777688 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.880433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.880501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.880515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.880536 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.880554 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.983749 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.983837 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.983873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.983954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:10 crc kubenswrapper[4747]: I1202 16:43:10.983981 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:10Z","lastTransitionTime":"2025-12-02T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.086774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.086839 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.086855 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.086879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.086895 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.189316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.189352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.189360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.189373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.189381 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.292319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.292376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.292387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.292404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.292416 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.395245 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.395290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.395298 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.395314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.395330 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.497709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.497750 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.497761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.497780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.497790 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
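Annotation: every Ready heartbeat above carries the same root complaint, that /etc/kubernetes/cni/net.d/ contains no CNI network config, so the kubelet keeps the node NotReady. The Go sketch below reproduces only the directory-scan idea behind that message for local debugging; it is not the kubelet's actual implementation, and the file extensions listed are the ones libcni-based runtimes commonly accept.

```go
// cnicheck.go — a minimal, hypothetical diagnostic matching the
// "NetworkReady=false ... no CNI configuration file" entries above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	var confs []string
	for _, e := range entries {
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json": // extensions commonly accepted by libcni
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		// Mirrors the condition behind "NetworkPluginNotReady" in the heartbeats above.
		fmt.Println("no CNI configuration file found; network plugin not ready")
		return
	}
	fmt.Println("CNI configs present:", confs)
}
```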
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.509199 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.509259 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.509282 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.509303 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509388 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:43:15.509355153 +0000 UTC m=+26.036243902 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509410 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509426 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509449 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509410 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509471 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:15.509452576 +0000 UTC m=+26.036341395 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.509495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509509 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:15.509501457 +0000 UTC m=+26.036390206 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509461 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509544 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:15.509538788 +0000 UTC m=+26.036427537 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509566 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509582 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509593 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.509635 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:15.509623501 +0000 UTC m=+26.036512330 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.600243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.600287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.600297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.600316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.600326 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.703326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.703373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.703385 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.703403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.703414 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
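Annotation: the nestedpendingoperations entries above gate each failed volume operation behind a doubling wait ("No retries permitted until ... durationBeforeRetry 4s"). The sketch below models only that gating logic; the initial and maximum durations are assumptions, not constants read from this log, though 4s is consistent with four consecutive failures of a 500ms base doubling each time.

```go
// backoffgate.go — a minimal sketch of per-operation retry gating, in the spirit
// of the "No retries permitted until <time> (durationBeforeRetry <d>)" entries.
package main

import (
	"fmt"
	"time"
)

type backoff struct {
	initial, max, current time.Duration
	lastErrorTime         time.Time
}

// fail records another failure and doubles the wait, up to the cap.
func (b *backoff) fail(now time.Time) {
	if b.current == 0 {
		b.current = b.initial
	} else {
		b.current *= 2
		if b.current > b.max {
			b.current = b.max
		}
	}
	b.lastErrorTime = now
}

// allowed reports whether a new attempt may start yet.
func (b *backoff) allowed(now time.Time) bool {
	return now.After(b.lastErrorTime.Add(b.current))
}

func main() {
	// Assumed constants; chosen so that the fourth failure yields the 4s seen above.
	b := &backoff{initial: 500 * time.Millisecond, max: 2 * time.Minute}
	t0 := time.Now()
	for i := 0; i < 4; i++ {
		b.fail(t0) // 0.5s -> 1s -> 2s -> 4s
	}
	fmt.Println("durationBeforeRetry", b.current,
		"- no retries permitted until", t0.Add(b.current).Format(time.RFC3339))
	fmt.Println("retry allowed now?", b.allowed(time.Now()))
}
```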
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.721707 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.733841 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.738243 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
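Annotation: the status-patch failures that follow (and the node-status failure at the top of this capture) are all the same x509 validity-window rejection: the node-network-identity webhook's serving certificate has a NotAfter of 2025-08-24T17:21:41Z while the node clock reads 2025-12-02. A minimal Go sketch of the same check is below; the certificate path is hypothetical (the log does not say where the cert lives on disk), and this is not the cluster's own tooling.

```go
// expired.go — reproduce the "certificate has expired or is not yet valid"
// comparison against a PEM-encoded certificate on disk.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path; substitute the webhook's actual serving certificate.
	raw, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(raw)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	// The same window test crypto/x509 applies during verification:
	// a certificate is invalid if now < NotBefore or now > NotAfter.
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter.Format(time.RFC3339))
	}
}
```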
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.754555 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.760148 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.760181 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.760224 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.760325 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.760386 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:11 crc kubenswrapper[4747]: E1202 16:43:11.760477 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.775677 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.801601 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.806557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.806618 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.806635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.806658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.806672 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.838499 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.858274 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.877815 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.908710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.908755 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.908766 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.908782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.908793 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:11Z","lastTransitionTime":"2025-12-02T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.912768 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.915115 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b"} Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.924377 4747 csr.go:261] certificate signing request csr-j6lvk is approved, waiting to be issued Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.930313 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.943412 4747 csr.go:257] certificate signing request csr-j6lvk is issued Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.962998 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b6
9dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:11 crc kubenswrapper[4747]: I1202 16:43:11.979390 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:11Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.005945 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.015494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.015563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.015577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.015601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.015623 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.037277 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.057511 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.077986 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.092681 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.106327 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.118263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.118307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.118321 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.118338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.118352 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.123978 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.137184 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.165524 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.179308 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.191817 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.214790 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.220565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.220627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.220642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.220658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.220670 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.243563 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.277943 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.302142 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.323661 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.323695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.323705 4747 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.323721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.323731 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.426439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.426498 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.426524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.426546 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.426559 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.529331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.529389 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.529403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.529445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.529459 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.631458 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.631493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.631503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.631517 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.631527 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.720773 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-m5zcc"] Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.721161 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.721218 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-tsbf6"] Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.721593 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-scr52"] Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.721717 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.721814 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.724851 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.724897 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.724851 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.725133 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.725256 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.725283 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.725289 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.725427 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.725450 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.725477 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.727723 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.727730 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.727785 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.733210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.733252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.733263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.733280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.733290 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.741220 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.760032 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.773245 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.787300 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.802439 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.823975 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn7k7\" (UniqueName: \"kubernetes.io/projected/444e1e59-7bc7-44cd-bb37-ed903442b724-kube-api-access-xn7k7\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824036 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-cni-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824063 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr2f9\" (UniqueName: \"kubernetes.io/projected/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-kube-api-access-nr2f9\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824102 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/444e1e59-7bc7-44cd-bb37-ed903442b724-mcd-auth-proxy-config\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/53ad3580-be8b-44c9-b657-3fa2cfd54956-hosts-file\") pod \"node-resolver-scr52\" (UID: \"53ad3580-be8b-44c9-b657-3fa2cfd54956\") " pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824157 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-os-release\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824179 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-k8s-cni-cncf-io\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824199 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-conf-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824220 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-hostroot\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824242 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/444e1e59-7bc7-44cd-bb37-ed903442b724-proxy-tls\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824261 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-socket-dir-parent\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824279 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-daemon-config\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824298 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-multus-certs\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824327 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-cni-multus\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824349 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzk2t\" (UniqueName: \"kubernetes.io/projected/53ad3580-be8b-44c9-b657-3fa2cfd54956-kube-api-access-lzk2t\") pod \"node-resolver-scr52\" (UID: \"53ad3580-be8b-44c9-b657-3fa2cfd54956\") " pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-system-cni-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824432 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-cnibin\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824452 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-netns\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824470 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-kubelet\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824489 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-etc-kubernetes\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824519 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/444e1e59-7bc7-44cd-bb37-ed903442b724-rootfs\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824537 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-cni-binary-copy\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.824558 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-cni-bin\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.830552 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:4
2:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from 
k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.836214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.836256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.836267 4747 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.836282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.836299 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.866201 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\
\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\
\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.890607 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.905676 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.922362 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925346 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/444e1e59-7bc7-44cd-bb37-ed903442b724-rootfs\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925514 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/444e1e59-7bc7-44cd-bb37-ed903442b724-rootfs\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925527 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-cni-binary-copy\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925624 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-cni-bin\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925673 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn7k7\" (UniqueName: \"kubernetes.io/projected/444e1e59-7bc7-44cd-bb37-ed903442b724-kube-api-access-xn7k7\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925702 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-cni-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925729 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr2f9\" (UniqueName: \"kubernetes.io/projected/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-kube-api-access-nr2f9\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-cni-bin\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925771 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/444e1e59-7bc7-44cd-bb37-ed903442b724-mcd-auth-proxy-config\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925801 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/53ad3580-be8b-44c9-b657-3fa2cfd54956-hosts-file\") pod \"node-resolver-scr52\" (UID: \"53ad3580-be8b-44c9-b657-3fa2cfd54956\") " pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-os-release\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925854 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-k8s-cni-cncf-io\") pod \"multus-tsbf6\" (UID: 
\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925879 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-conf-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925941 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/444e1e59-7bc7-44cd-bb37-ed903442b724-proxy-tls\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925947 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/53ad3580-be8b-44c9-b657-3fa2cfd54956-hosts-file\") pod \"node-resolver-scr52\" (UID: \"53ad3580-be8b-44c9-b657-3fa2cfd54956\") " pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-hostroot\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925994 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-os-release\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926020 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-socket-dir-parent\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926049 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-daemon-config\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926011 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-hostroot\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926053 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-conf-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.925988 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-k8s-cni-cncf-io\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926070 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-multus-certs\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926093 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-multus-certs\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926113 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-socket-dir-parent\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926171 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-cni-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926178 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-cni-multus\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926224 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzk2t\" (UniqueName: \"kubernetes.io/projected/53ad3580-be8b-44c9-b657-3fa2cfd54956-kube-api-access-lzk2t\") pod \"node-resolver-scr52\" (UID: \"53ad3580-be8b-44c9-b657-3fa2cfd54956\") " pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-system-cni-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926256 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-cnibin\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-cni-multus\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc 
kubenswrapper[4747]: I1202 16:43:12.926300 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-netns\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926303 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-system-cni-dir\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926272 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-run-netns\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926363 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-kubelet\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926327 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-cnibin\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926395 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-etc-kubernetes\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926418 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-host-var-lib-kubelet\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926426 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-etc-kubernetes\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926795 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/444e1e59-7bc7-44cd-bb37-ed903442b724-mcd-auth-proxy-config\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.926887 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-multus-daemon-config\") pod 
\"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.927859 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-cni-binary-copy\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.936241 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/444e1e59-7bc7-44cd-bb37-ed903442b724-proxy-tls\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.938515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.938542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.938551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.938567 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.938576 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:12Z","lastTransitionTime":"2025-12-02T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.942432 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.945282 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-02 16:38:11 +0000 UTC, rotation deadline is 2026-10-12 19:18:03.34228628 +0000 UTC Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.945354 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7538h34m50.396934935s for next certificate rotation Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.945882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr2f9\" (UniqueName: \"kubernetes.io/projected/de9e6dbe-5eb1-40b7-8ddf-a8df9977153a-kube-api-access-nr2f9\") pod \"multus-tsbf6\" (UID: \"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\") " pod="openshift-multus/multus-tsbf6" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.949367 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzk2t\" (UniqueName: \"kubernetes.io/projected/53ad3580-be8b-44c9-b657-3fa2cfd54956-kube-api-access-lzk2t\") pod \"node-resolver-scr52\" (UID: \"53ad3580-be8b-44c9-b657-3fa2cfd54956\") " pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.956347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn7k7\" (UniqueName: \"kubernetes.io/projected/444e1e59-7bc7-44cd-bb37-ed903442b724-kube-api-access-xn7k7\") pod \"machine-config-daemon-m5zcc\" (UID: \"444e1e59-7bc7-44cd-bb37-ed903442b724\") " pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.957872 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.970443 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.983248 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:12 crc kubenswrapper[4747]: I1202 16:43:12.996856 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:12Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.017542 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.029239 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.036974 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.042594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.042861 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.042993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.043121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.043207 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.046274 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.047704 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-tsbf6" Dec 02 16:43:13 crc kubenswrapper[4747]: W1202 16:43:13.050664 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod444e1e59_7bc7_44cd_bb37_ed903442b724.slice/crio-c2033f0689d33dce90139fdd2a0f031b88471be29865fb6fd471a5da2f909173 WatchSource:0}: Error finding container c2033f0689d33dce90139fdd2a0f031b88471be29865fb6fd471a5da2f909173: Status 404 returned error can't find the container with id c2033f0689d33dce90139fdd2a0f031b88471be29865fb6fd471a5da2f909173 Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.056591 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-scr52" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.062457 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: W1202 16:43:13.071670 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53ad3580_be8b_44c9_b657_3fa2cfd54956.slice/crio-51e4c5e09fb0ee37dd6b579a038a576dfc954365019f9684568b1f1c01554a00 WatchSource:0}: Error finding container 51e4c5e09fb0ee37dd6b579a038a576dfc954365019f9684568b1f1c01554a00: Status 404 returned error can't find the container with id 51e4c5e09fb0ee37dd6b579a038a576dfc954365019f9684568b1f1c01554a00 Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.075568 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.090023 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.111536 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-q2z9c"] Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.113355 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.116755 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.117018 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.118808 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zmcxm"] Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.119864 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.123406 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.123922 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.124812 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.124849 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.125066 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.125201 4747 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.125232 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.125294 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.139346 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.147732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.147772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.147785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.147802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.147813 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.154957 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.169892 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.187195 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.205790 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.222155 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229086 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msnk2\" (UniqueName: \"kubernetes.io/projected/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-kube-api-access-msnk2\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-node-log\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229165 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-bin\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229187 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd24c\" (UniqueName: \"kubernetes.io/projected/b62a2b51-3b8a-4786-97ee-01d2c6332c83-kube-api-access-xd24c\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229223 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-etc-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229246 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-ovn\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229345 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-netns\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229368 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-slash\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229388 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovn-node-metrics-cert\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229408 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-systemd-units\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229430 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-var-lib-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229453 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-env-overrides\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229474 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-kubelet\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229493 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229515 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-system-cni-dir\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229536 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-ovn-kubernetes\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229556 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-config\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229580 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cnibin\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229602 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229626 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-netd\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229652 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cni-binary-copy\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229673 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229699 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229723 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-os-release\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229743 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-log-socket\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229784 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-script-lib\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.229823 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-systemd\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.241453 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.249980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.250277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 
16:43:13.250352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.250417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.250479 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.254696 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.267283 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.280544 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.294765 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.316797 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331082 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-slash\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331127 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovn-node-metrics-cert\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331146 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-systemd-units\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331166 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-var-lib-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331189 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-env-overrides\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331197 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-slash\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331228 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-kubelet\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331243 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331253 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-systemd-units\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331264 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-system-cni-dir\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331293 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-system-cni-dir\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331322 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-kubelet\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331343 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.331475 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-var-lib-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332009 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-env-overrides\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332078 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-ovn-kubernetes\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-config\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332129 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cnibin\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332155 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332182 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cni-binary-copy\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332205 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-netd\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-os-release\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332256 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-log-socket\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332278 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332317 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-script-lib\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332346 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-systemd\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332369 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msnk2\" (UniqueName: \"kubernetes.io/projected/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-kube-api-access-msnk2\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332423 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-node-log\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332458 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd24c\" (UniqueName: \"kubernetes.io/projected/b62a2b51-3b8a-4786-97ee-01d2c6332c83-kube-api-access-xd24c\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332487 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-etc-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332503 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-bin\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-netns\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332539 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-ovn\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332594 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-ovn\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.332632 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-ovn-kubernetes\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.333009 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.333143 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-config\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.333266 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cnibin\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.333439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.333706 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-script-lib\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.333764 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-systemd\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334144 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-node-log\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334308 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-etc-openvswitch\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334366 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-bin\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334404 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-netns\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334646 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334676 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cni-binary-copy\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334860 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-os-release\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.334948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-netd\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.335015 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-log-socket\") pod \"ovnkube-node-zmcxm\" (UID: 
\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.335563 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.335865 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovn-node-metrics-cert\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.348984 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/
etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.351485 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msnk2\" (UniqueName: \"kubernetes.io/projected/bc9a3ea0-15a0-4198-808a-b3bd9a9f4527-kube-api-access-msnk2\") pod \"multus-additional-cni-plugins-q2z9c\" (UID: \"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\") " pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.352834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.352991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.353068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.353138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.353228 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.353084 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd24c\" (UniqueName: \"kubernetes.io/projected/b62a2b51-3b8a-4786-97ee-01d2c6332c83-kube-api-access-xd24c\") pod \"ovnkube-node-zmcxm\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.428817 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.435712 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:13 crc kubenswrapper[4747]: W1202 16:43:13.442615 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc9a3ea0_15a0_4198_808a_b3bd9a9f4527.slice/crio-dee1d6c438f653ad91149f5872737165f2e886a52a88433b8c3ff688d1064811 WatchSource:0}: Error finding container dee1d6c438f653ad91149f5872737165f2e886a52a88433b8c3ff688d1064811: Status 404 returned error can't find the container with id dee1d6c438f653ad91149f5872737165f2e886a52a88433b8c3ff688d1064811 Dec 02 16:43:13 crc kubenswrapper[4747]: W1202 16:43:13.454425 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb62a2b51_3b8a_4786_97ee_01d2c6332c83.slice/crio-276e654dfaa1bb645200da477315585256fdd1921fd69652581b6ef4f5976d23 WatchSource:0}: Error finding container 276e654dfaa1bb645200da477315585256fdd1921fd69652581b6ef4f5976d23: Status 404 returned error can't find the container with id 276e654dfaa1bb645200da477315585256fdd1921fd69652581b6ef4f5976d23 Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.455594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.455627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.455637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.455653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.455665 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.567691 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.567751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.567771 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.567791 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.567811 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.671219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.671420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.671432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.671450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.671464 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.760520 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:13 crc kubenswrapper[4747]: E1202 16:43:13.760667 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.761110 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:13 crc kubenswrapper[4747]: E1202 16:43:13.761163 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.761207 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:13 crc kubenswrapper[4747]: E1202 16:43:13.761250 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.775047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.775088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.775099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.775117 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.775129 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.878337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.878374 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.878385 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.878400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.878409 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.921545 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709" exitCode=0 Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.921644 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.921711 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"276e654dfaa1bb645200da477315585256fdd1921fd69652581b6ef4f5976d23"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.922973 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerStarted","Data":"dee1d6c438f653ad91149f5872737165f2e886a52a88433b8c3ff688d1064811"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.924735 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-scr52" event={"ID":"53ad3580-be8b-44c9-b657-3fa2cfd54956","Type":"ContainerStarted","Data":"ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.924767 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-scr52" event={"ID":"53ad3580-be8b-44c9-b657-3fa2cfd54956","Type":"ContainerStarted","Data":"51e4c5e09fb0ee37dd6b579a038a576dfc954365019f9684568b1f1c01554a00"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.926077 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsbf6" event={"ID":"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a","Type":"ContainerStarted","Data":"80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.926125 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsbf6" 
event={"ID":"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a","Type":"ContainerStarted","Data":"3ebc5d1903321382cd4635cfb001a0499fd69b1b4d0b0907509667b8ddde593e"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.927600 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.927626 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.927636 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"c2033f0689d33dce90139fdd2a0f031b88471be29865fb6fd471a5da2f909173"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.936730 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.948078 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.960151 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.973295 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.981251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.981308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.981320 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.981342 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.981353 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:13Z","lastTransitionTime":"2025-12-02T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.986619 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:13 crc kubenswrapper[4747]: I1202 16:43:13.998457 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:13Z is 
after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.014335 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.034157 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z 
is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.049339 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.064327 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.078481 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.084011 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.084046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.084058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.084078 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.084089 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.094497 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.114843 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.130724 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.149294 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.161531 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.173597 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.187129 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07d
c0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.187888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.187953 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.187964 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.187984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.187997 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.228257 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.243305 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.259647 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.274701 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.290461 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.291091 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.291141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.291154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.291171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.291472 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.306890 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.321525 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.334699 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.355020 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\
\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"
\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.382459 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z 
is after 2025-08-24T17:21:41Z"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.394761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.394828 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.394844 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.394867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.394973 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.497760 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.497815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.497830 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.497851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.497862 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.600469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.600824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.600940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.601034 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.601141 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.703878 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.704441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.704454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.704475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.704491 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.807355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.807403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.807413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.807429 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.807439 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.910020 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.910076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.910089 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.910108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.910121 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:14Z","lastTransitionTime":"2025-12-02T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.934214 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.934287 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.936179 4747 generic.go:334] "Generic (PLEG): container finished" podID="bc9a3ea0-15a0-4198-808a-b3bd9a9f4527" containerID="a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba" exitCode=0
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.936239 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerDied","Data":"a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba"}
Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.951842 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.965587 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:14 crc kubenswrapper[4747]: I1202 16:43:14.985038 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:14Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.005277 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z 
is after 2025-08-24T17:21:41Z"
Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.012642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.012680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.012689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.012705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.012717 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.019724 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.032132 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.045093 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.057864 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.081111 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.098211 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.114843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.114894 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.114931 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.114953 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.114965 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.118264 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.134404 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.150356 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.173792 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.217111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.217141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.217149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.217166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.217176 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.319680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.319732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.319745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.319760 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.319771 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.422614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.422664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.422674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.422693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.422702 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.526244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.526306 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.526329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.526353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.526369 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.556212 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.556310 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.556336 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.556361 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.556385 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556464 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556536 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556554 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556567 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556619 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-02 16:43:23.556604693 +0000 UTC m=+34.083493442 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556639 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:43:23.556629893 +0000 UTC m=+34.083518642 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556651 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:23.556645424 +0000 UTC m=+34.083534163 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556687 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556701 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556703 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556713 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556756 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:23.556739436 +0000 UTC m=+34.083628185 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.556770 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:23.556764967 +0000 UTC m=+34.083653716 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.628951 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.628987 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.628997 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.629014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.629025 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.732041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.732099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.732112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.732134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.732145 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.759493 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.759514 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.759534 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.759668 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.759761 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:15 crc kubenswrapper[4747]: E1202 16:43:15.759860 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.835092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.835218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.835255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.835273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.835299 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.937125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.937160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.937171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.937188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.937199 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:15Z","lastTransitionTime":"2025-12-02T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.945593 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.945640 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.945650 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.945658 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.947628 4747 generic.go:334] "Generic (PLEG): container finished" podID="bc9a3ea0-15a0-4198-808a-b3bd9a9f4527" containerID="44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10" exitCode=0 Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.947679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerDied","Data":"44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10"} Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.977220 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:15 crc kubenswrapper[4747]: I1202 16:43:15.993497 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:15Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.008397 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.027435 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.040773 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.040817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.040829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.040847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.040876 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.042715 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.059625 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.077324 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.094675 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.115709 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z 
is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.131641 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.145105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.145183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.145198 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.145220 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.145236 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.145878 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.159759 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.178722 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.191065 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.248230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.248279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.248291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.248308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.248320 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.351088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.351133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.351142 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.351159 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.351168 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.453710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.453761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.453775 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.453797 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.453812 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.464715 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-c45mn"] Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.465207 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.465796 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m59s\" (UniqueName: \"kubernetes.io/projected/8e33e4be-7329-468e-aee4-dc4b5fe081ba-kube-api-access-6m59s\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.465899 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8e33e4be-7329-468e-aee4-dc4b5fe081ba-serviceca\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.465956 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e33e4be-7329-468e-aee4-dc4b5fe081ba-host\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.467561 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.468536 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.469510 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.470347 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.481360 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.493011 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.509264 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secr
ets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabout
s-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.528193 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z 
is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.542068 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.555580 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.557531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.557590 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.557604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.557631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.557648 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.566685 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m59s\" (UniqueName: \"kubernetes.io/projected/8e33e4be-7329-468e-aee4-dc4b5fe081ba-kube-api-access-6m59s\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.566862 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8e33e4be-7329-468e-aee4-dc4b5fe081ba-serviceca\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.566949 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e33e4be-7329-468e-aee4-dc4b5fe081ba-host\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.567055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e33e4be-7329-468e-aee4-dc4b5fe081ba-host\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.568083 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8e33e4be-7329-468e-aee4-dc4b5fe081ba-serviceca\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.570852 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.586656 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07d
c0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.590674 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m59s\" (UniqueName: \"kubernetes.io/projected/8e33e4be-7329-468e-aee4-dc4b5fe081ba-kube-api-access-6m59s\") pod \"node-ca-c45mn\" (UID: \"8e33e4be-7329-468e-aee4-dc4b5fe081ba\") " pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.606626 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.619106 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.630568 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.646999 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.658719 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.660594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.660632 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.660645 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.660663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.660674 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.672453 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.686037 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.764358 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.764405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.764418 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.764435 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.764446 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.779609 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-c45mn" Dec 02 16:43:16 crc kubenswrapper[4747]: W1202 16:43:16.799127 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e33e4be_7329_468e_aee4_dc4b5fe081ba.slice/crio-c0db876e8d2887595172f9b98dc67ec956b7c02987de35e93fba62dddbf1becf WatchSource:0}: Error finding container c0db876e8d2887595172f9b98dc67ec956b7c02987de35e93fba62dddbf1becf: Status 404 returned error can't find the container with id c0db876e8d2887595172f9b98dc67ec956b7c02987de35e93fba62dddbf1becf Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.867975 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.868034 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.868047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.868065 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.868077 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.952863 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-c45mn" event={"ID":"8e33e4be-7329-468e-aee4-dc4b5fe081ba","Type":"ContainerStarted","Data":"c0db876e8d2887595172f9b98dc67ec956b7c02987de35e93fba62dddbf1becf"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.955752 4747 generic.go:334] "Generic (PLEG): container finished" podID="bc9a3ea0-15a0-4198-808a-b3bd9a9f4527" containerID="79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952" exitCode=0 Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.955797 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerDied","Data":"79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.974637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.975203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.975218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.975239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.975259 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:16Z","lastTransitionTime":"2025-12-02T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.976571 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:16 crc kubenswrapper[4747]: I1202 16:43:16.996638 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:16Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.009696 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.021230 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.036742 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.050487 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.062386 
4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.079080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.079133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.079145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.079165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.079177 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.083822 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"20
25-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f2
2e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.096266 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.107848 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.122214 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.138081 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.149315 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.161266 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.177514 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.181078 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.181131 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.181142 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.181166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.181180 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.305085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.305131 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.305143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.305163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.305186 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.407560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.407605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.407615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.407633 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.407646 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.510510 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.510559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.510567 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.510583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.510594 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.613283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.613325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.613335 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.613354 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.613366 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.716472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.716519 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.716529 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.716550 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.716561 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.760232 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:17 crc kubenswrapper[4747]: E1202 16:43:17.760758 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.760937 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:17 crc kubenswrapper[4747]: E1202 16:43:17.761083 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.761159 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:17 crc kubenswrapper[4747]: E1202 16:43:17.761211 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.819155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.819193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.819204 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.819223 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.819235 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.921484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.921530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.921539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.921554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.921782 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:17Z","lastTransitionTime":"2025-12-02T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.961045 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-c45mn" event={"ID":"8e33e4be-7329-468e-aee4-dc4b5fe081ba","Type":"ContainerStarted","Data":"61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.964663 4747 generic.go:334] "Generic (PLEG): container finished" podID="bc9a3ea0-15a0-4198-808a-b3bd9a9f4527" containerID="544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f" exitCode=0
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.964730 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerDied","Data":"544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.971397 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"}
Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.979217 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:17 crc kubenswrapper[4747]: I1202 16:43:17.991644 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:17Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.003777 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.027426 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.027494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.027506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.027527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.027541 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.030153 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.044724 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.045988 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.062058 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\
"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.075492 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 
envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.086539 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.103351 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z 
is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.115080 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.125436 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.130231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.130270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.130282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.130326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.130339 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.138765 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mount
Path\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.150182 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.160307 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.173109 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.192309 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.208761 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.225143 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.233048 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.233132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.233162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.233179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.233189 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.244962 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312e
d6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.264110 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.277409 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.317359 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.331491 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.335752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.335783 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.335793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.335810 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.335821 4747 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.350642 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7
c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.365273 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.377402 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.390276 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.405226 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.421037 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.433775 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.438572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.438608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.438619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.438638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.438651 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.541253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.541302 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.541311 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.541329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.541341 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.643848 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.643901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.643935 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.643954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.643964 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.746657 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.746718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.746735 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.746756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.746768 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.849377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.849419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.849431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.849448 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.849459 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.952591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.952631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.952643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.952659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.952672 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:18Z","lastTransitionTime":"2025-12-02T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.983954 4747 generic.go:334] "Generic (PLEG): container finished" podID="bc9a3ea0-15a0-4198-808a-b3bd9a9f4527" containerID="9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de" exitCode=0 Dec 02 16:43:18 crc kubenswrapper[4747]: I1202 16:43:18.984052 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerDied","Data":"9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.001556 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:18Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.022738 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.038584 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.051564 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.057050 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.057222 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.057424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc 
kubenswrapper[4747]: I1202 16:43:19.057585 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.057766 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.067784 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiser
ver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from 
k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.084588 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.103022 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.121599 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.140753 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.156146 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.163305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.163369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.163379 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.163397 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.163408 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.194329 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.254514 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z 
is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.266422 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.266459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.266468 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.266484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.266496 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.277805 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.290551 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.302948 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.370646 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.370719 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.370730 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.370752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.370770 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.473601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.473642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.473654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.473674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.473692 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.581227 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.581544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.581560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.581582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.581595 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.623559 4747 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.683736 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.683778 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.683787 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.683804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.683815 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.760057 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.760057 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:19 crc kubenswrapper[4747]: E1202 16:43:19.760316 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.760344 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:19 crc kubenswrapper[4747]: E1202 16:43:19.760448 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:19 crc kubenswrapper[4747]: E1202 16:43:19.760630 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.787080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.787118 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.787130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.787148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.787158 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.789089 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.806085 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.824977 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.839862 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.855482 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.869134 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.883591 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.889065 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.889099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.889111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.889128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.889139 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.897992 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC 
to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.912104 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\
",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.931751 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.947458 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.959262 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.974381 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.991752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.991796 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.991806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.991822 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.991833 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:19Z","lastTransitionTime":"2025-12-02T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.991988 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1
74f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:19Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:19 crc kubenswrapper[4747]: I1202 16:43:19.994616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerStarted","Data":"2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75"} Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.006813 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857"} Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.008232 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:20Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.094825 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.094875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.094887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.094927 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.094942 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.199209 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.199256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.199269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.199285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.199299 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
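Every status patch above is rejected for the same reason: the network-node-identity webhook on 127.0.0.1:9743 presents a serving certificate whose NotAfter of 2025-08-24T17:21:41Z lies months before the node clock of 2025-12-02. A minimal Go sketch of the validity-window test behind the repeated "x509: certificate has expired or is not yet valid" message, assuming a hypothetical webhook-cert.pem copied out of the pod's webhook-cert volume (mounted at /etc/webhook-cert/ per the entries above):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical local copy of the webhook serving certificate.
	data, err := os.ReadFile("webhook-cert.pem")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	// The same window check the TLS handshake performs: the current
	// time must fall inside [NotBefore, NotAfter].
	now := time.Now()
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("certificate invalid: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339))
		return
	}
	fmt.Println("certificate within its validity window")
}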
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.302101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.302156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.302168 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.302190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.302204 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.405461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.405507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.405518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.405538 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.405553 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.508591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.508676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.508701 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.508730 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.508748 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
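Each setters.go:603 entry above embeds the node's Ready condition as plain JSON after "condition=". A short sketch decoding that exact payload with only the standard library; the NodeCondition struct here is a hand-rolled stand-in for the corresponding Kubernetes API type, mirroring just the fields present in the log:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// NodeCondition mirrors the fields of the condition objects
// logged by setters.go above.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied verbatim from a "Node became not ready" entry.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}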
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.610681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.610716 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.610727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.610746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.610756 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.628234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.628271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.628283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.628300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.628312 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
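The node status patch in the entry that follows carries capacity and allocatable quantities in Kubernetes resource notation ("32865356Ki", "11800m"). A quick sketch converting the Ki-suffixed memory values, assuming only the suffixes seen in this log; the real apimachinery resource.Quantity parser handles the full grammar:

package main

import (
	"fmt"
	"log"
	"strconv"
	"strings"
)

// parseKi converts an "<n>Ki" quantity from the node patch below
// into bytes; only the Ki suffix seen in this log is handled.
func parseKi(q string) (int64, error) {
	n, err := strconv.ParseInt(strings.TrimSuffix(q, "Ki"), 10, 64)
	if err != nil {
		return 0, err
	}
	return n * 1024, nil
}

func main() {
	capacity, err := parseKi("32865356Ki") // capacity.memory from the patch
	if err != nil {
		log.Fatal(err)
	}
	allocatable, err := parseKi("32404556Ki") // allocatable.memory from the patch
	if err != nil {
		log.Fatal(err)
	}
	// The gap is what the kubelet holds back for system reservations.
	fmt.Printf("memory reserved: %d bytes\n", capacity-allocatable)
}

For the values in this patch the gap works out to 460800Ki, exactly 450 MiB withheld from the node's roughly 31 GiB of memory.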
Dec 02 16:43:20 crc kubenswrapper[4747]: E1202 16:43:20.639264 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:20Z is after 
2025-08-24T17:21:41Z"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.642080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.642114 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.642124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.642138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.642147 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:20 crc kubenswrapper[4747]: E1202 16:43:20.653241 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:20Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.657727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.657786 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.657808 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.657827 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.657837 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:20 crc kubenswrapper[4747]: E1202 16:43:20.668218 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:20Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.671230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.671369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.671437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.671509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.671570 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:20 crc kubenswrapper[4747]: E1202 16:43:20.681720 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:20Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.684634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.684663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.684672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.684686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.684697 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:20 crc kubenswrapper[4747]: E1202 16:43:20.694872 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:20Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:20 crc kubenswrapper[4747]: E1202 16:43:20.695000 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.714019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.714091 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.714159 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.714184 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.714198 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.816859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.816928 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.816941 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.816962 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.816974 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.920614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.920667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.920678 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.920696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:20 crc kubenswrapper[4747]: I1202 16:43:20.920707 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:20Z","lastTransitionTime":"2025-12-02T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.016844 4747 generic.go:334] "Generic (PLEG): container finished" podID="bc9a3ea0-15a0-4198-808a-b3bd9a9f4527" containerID="2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75" exitCode=0 Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.016944 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerDied","Data":"2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.017504 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.017581 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.023195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.023293 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.023306 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.023351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.023368 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.036537 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.058417 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.070147 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.080036 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.094468 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.107814 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.120993 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.125482 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.125653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.125731 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.125823 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.125899 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.135582 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.147772 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.151192 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.151288 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.159597 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.178535 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\
\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"resta
rtCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.199561 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\
\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\
\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.219733 4747 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.228695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.229020 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: 
I1202 16:43:21.229120 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.229219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.229331 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.234417 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.247296 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.267668 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8
eee168ce3aeba669f9cf7857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.281041 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.292498 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.306669 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f
dff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\
\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.318576 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.335018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.335080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 
crc kubenswrapper[4747]: I1202 16:43:21.335099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.335121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.335137 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.335713 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.348095 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.359123 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.368297 4747 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.377861 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.394307 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.407106 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.418966 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.434757 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.437994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.438020 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.438027 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.438040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.438050 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.447877 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:21Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.541013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.541048 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.541059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.541075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.541085 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.644057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.644137 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.644151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.644172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.644187 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.746925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.746970 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.746983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.747003 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.747018 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.761175 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.761276 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.761277 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:21 crc kubenswrapper[4747]: E1202 16:43:21.761366 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:21 crc kubenswrapper[4747]: E1202 16:43:21.761520 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:21 crc kubenswrapper[4747]: E1202 16:43:21.761645 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.850067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.850113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.850125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.850144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.850160 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.952059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.952103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.952116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.952134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:21 crc kubenswrapper[4747]: I1202 16:43:21.952147 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:21Z","lastTransitionTime":"2025-12-02T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.025105 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" event={"ID":"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527","Type":"ContainerStarted","Data":"3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05"} Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.025251 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.045986 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8
eee168ce3aeba669f9cf7857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.054871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.054935 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.054946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.054961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.054972 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.059638 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.071067 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.084395 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.095875 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.111241 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.124924 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.136539 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.147697 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.157587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.157770 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.157852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.157946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.158036 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.160532 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.185099 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.196988 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.210586 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.224197 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478
274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 
UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.238684 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:22Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.261310 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.261349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.261357 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.261375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.261385 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.364101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.364161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.364179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.364197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.364210 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.466512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.466557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.466576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.466595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.466605 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.569711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.569757 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.569774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.569795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.569806 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.672482 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.672530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.672539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.672556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.672568 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.775613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.775674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.775690 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.775712 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.775726 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.878072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.878147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.878158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.878175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.878187 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.981170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.981248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.981260 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.981275 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:22 crc kubenswrapper[4747]: I1202 16:43:22.981284 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:22Z","lastTransitionTime":"2025-12-02T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.029453 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/0.log"
Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.031936 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857" exitCode=1
Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.032042 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857"}
Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.032703 4747 scope.go:117] "RemoveContainer" containerID="03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857"
Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.044070 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.056877 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.075447 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.083352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.083399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.083409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.083428 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.083439 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.087130 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.100114 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.115461 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.130634 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.143792 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.157253 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.167682 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.186128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.186172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.186183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.186201 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.186214 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.188947 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.211033 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8
eee168ce3aeba669f9cf7857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"message\\\":\\\"apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.108861 6071 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.109390 6071 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 16:43:22.109484 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 16:43:22.109511 6071 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 16:43:22.109514 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1202 16:43:22.109528 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1202 16:43:22.109530 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 16:43:22.109541 6071 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 16:43:22.109553 6071 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 16:43:22.109543 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1202 16:43:22.109565 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1202 16:43:22.109572 6071 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 16:43:22.109592 6071 factory.go:656] Stopping watch factory\\\\nI1202 16:43:22.109607 6071 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.228957 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.243450 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.254699 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:23Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.288288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.288326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.288336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.288352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.288362 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.392086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.392140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.392151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.392170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.392184 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.495230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.495476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.495486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.495500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.495509 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.572741 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.572878 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.572934 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.572962 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.572988 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573063 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:43:39.573031973 +0000 UTC m=+50.099920722 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573107 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573145 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573166 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573179 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573199 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:39.573180247 +0000 UTC m=+50.100068996 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573216 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:39.573206228 +0000 UTC m=+50.100094977 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573120 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573241 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573280 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573303 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573341 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:39.573319421 +0000 UTC m=+50.100208170 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.573359 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:39.573353222 +0000 UTC m=+50.100241961 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.598761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.598799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.598809 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.598825 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.598835 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.701315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.701361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.701372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.701388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.701397 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.759890 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.759976 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.760042 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.760129 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.760233 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:23 crc kubenswrapper[4747]: E1202 16:43:23.760294 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.803652 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.803692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.803700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.803716 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.803725 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.905979 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.906067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.906078 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.906095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:23 crc kubenswrapper[4747]: I1202 16:43:23.906108 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:23Z","lastTransitionTime":"2025-12-02T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.008441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.008480 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.008489 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.008504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.008513 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.036883 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/0.log" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.038844 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4"} Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.039019 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.052414 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs
\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.065703 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.085015 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.107830 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.112236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.112304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.112316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.112337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.112350 4747 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.123258 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.142522 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.158171 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.181341 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.201137 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.216301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.216359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.216370 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.216391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.216404 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.225172 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.239381 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f78
14a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.257399 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.282862 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"message\\\":\\\"apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.108861 6071 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.109390 6071 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 16:43:22.109484 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 16:43:22.109511 6071 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 16:43:22.109514 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1202 16:43:22.109528 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1202 16:43:22.109530 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 16:43:22.109541 6071 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 16:43:22.109553 6071 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 16:43:22.109543 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1202 16:43:22.109565 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1202 16:43:22.109572 6071 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 16:43:22.109592 6071 factory.go:656] Stopping watch factory\\\\nI1202 16:43:22.109607 6071 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.297883 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.317619 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:24Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.319533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.319575 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.319586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.319605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.319618 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.422510 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.422556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.422566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.422582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.422591 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.524821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.525156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.525270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.525344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.525410 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.628141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.628190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.628205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.628226 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.628235 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.731282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.731355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.731366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.731405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.731420 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.834141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.834199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.834214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.834236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.834248 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.937246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.937300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.937308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.937326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:24 crc kubenswrapper[4747]: I1202 16:43:24.937336 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:24Z","lastTransitionTime":"2025-12-02T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.027419 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz"]
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.027944 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.029854 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.030245 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.043531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.043569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.043582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.043677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.043695 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.044196 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.050150 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.065729 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.077482 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.091337 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/eec908e5-57bd-451d-92da-0f805a5b7254-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.091391 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkd2m\" (UniqueName: \"kubernetes.io/projected/eec908e5-57bd-451d-92da-0f805a5b7254-kube-api-access-xkd2m\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.091451 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/eec908e5-57bd-451d-92da-0f805a5b7254-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.091478 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/eec908e5-57bd-451d-92da-0f805a5b7254-env-overrides\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz"
Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.092468 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.106448 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.116887 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.131610 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.146016 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.146064 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc 
kubenswrapper[4747]: I1202 16:43:25.146077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.146096 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.146107 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.149744 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a
8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"message\\\":\\\"apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.108861 6071 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.109390 6071 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 16:43:22.109484 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 16:43:22.109511 6071 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 16:43:22.109514 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1202 16:43:22.109528 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1202 16:43:22.109530 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 16:43:22.109541 6071 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 16:43:22.109553 6071 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 16:43:22.109543 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1202 16:43:22.109565 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1202 16:43:22.109572 6071 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 16:43:22.109592 6071 factory.go:656] Stopping watch factory\\\\nI1202 16:43:22.109607 6071 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.162120 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.174439 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.188686 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.192515 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/eec908e5-57bd-451d-92da-0f805a5b7254-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.192564 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/eec908e5-57bd-451d-92da-0f805a5b7254-env-overrides\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.192638 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/eec908e5-57bd-451d-92da-0f805a5b7254-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.192676 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkd2m\" (UniqueName: \"kubernetes.io/projected/eec908e5-57bd-451d-92da-0f805a5b7254-kube-api-access-xkd2m\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.193416 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/eec908e5-57bd-451d-92da-0f805a5b7254-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.193473 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/eec908e5-57bd-451d-92da-0f805a5b7254-env-overrides\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.199227 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/eec908e5-57bd-451d-92da-0f805a5b7254-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.207315 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.211870 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkd2m\" (UniqueName: 
\"kubernetes.io/projected/eec908e5-57bd-451d-92da-0f805a5b7254-kube-api-access-xkd2m\") pod \"ovnkube-control-plane-749d76644c-98rgz\" (UID: \"eec908e5-57bd-451d-92da-0f805a5b7254\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.221656 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":tru
e,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.245153 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.249208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.249258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.249269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.249287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.249300 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.260980 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.279123 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:25Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.354185 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.357890 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.357949 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.357993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.358013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.358069 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.460943 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.461009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.461021 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.461038 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.461049 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.564302 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.564582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.564640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.564758 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.564837 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.668038 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.668081 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.668093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.668109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.668119 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.759852 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.759934 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:25 crc kubenswrapper[4747]: E1202 16:43:25.760007 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:25 crc kubenswrapper[4747]: E1202 16:43:25.760133 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.760274 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:25 crc kubenswrapper[4747]: E1202 16:43:25.760363 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.772118 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.772166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.772175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.772195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.772206 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.875094 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.875134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.875145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.875162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.875174 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.977982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.978022 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.978032 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.978048 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:25 crc kubenswrapper[4747]: I1202 16:43:25.978085 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:25Z","lastTransitionTime":"2025-12-02T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.052038 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/1.log" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.053032 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/0.log" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.055566 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4" exitCode=1 Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.055667 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.055768 4747 scope.go:117] "RemoveContainer" containerID="03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.056503 4747 scope.go:117] "RemoveContainer" containerID="8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4" Dec 02 16:43:26 crc kubenswrapper[4747]: E1202 16:43:26.056701 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.057577 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" event={"ID":"eec908e5-57bd-451d-92da-0f805a5b7254","Type":"ContainerStarted","Data":"bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.057618 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" event={"ID":"eec908e5-57bd-451d-92da-0f805a5b7254","Type":"ContainerStarted","Data":"8d4406608cc11519815926d3a7412e4b39c5e47b29b1705d3ae902e07937fa40"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.071983 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.081290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.081355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.081364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.081382 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.081394 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.087889 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.100591 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.123084 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.136328 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.149100 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.164086 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.179896 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.190620 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.190668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.190679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.190702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.190713 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.210115 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"message\\\":\\\"apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.108861 6071 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.109390 6071 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 16:43:22.109484 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 16:43:22.109511 6071 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 16:43:22.109514 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1202 16:43:22.109528 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1202 16:43:22.109530 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 16:43:22.109541 6071 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 16:43:22.109553 6071 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 16:43:22.109543 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1202 16:43:22.109565 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1202 16:43:22.109572 6071 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 16:43:22.109592 6071 factory.go:656] Stopping watch factory\\\\nI1202 16:43:22.109607 6071 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.228800 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.242771 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.261049 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa3
3c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.277597 4747 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.292934 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.292971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.292983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.293001 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.293012 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.293666 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.307407 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.320399 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.395826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.395866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.395876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.395893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.395924 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.491028 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-8brc6"] Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.491630 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:26 crc kubenswrapper[4747]: E1202 16:43:26.491731 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.498122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.498159 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.498169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.498183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.498195 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.509547 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a
8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.532358 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.545374 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.560534 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.578692 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.595682 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.600326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.600374 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.600388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.600406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.600417 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.607371 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.614213 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.614303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gclj\" (UniqueName: \"kubernetes.io/projected/fe274425-e804-4934-aa14-81ef24981fe9-kube-api-access-4gclj\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.624950 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.636120 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc 
kubenswrapper[4747]: I1202 16:43:26.649385 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.661777 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.680576 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.701381 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"message\\\":\\\"apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.108861 6071 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.109390 6071 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 16:43:22.109484 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 16:43:22.109511 6071 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 16:43:22.109514 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1202 16:43:22.109528 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1202 16:43:22.109530 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 16:43:22.109541 6071 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 16:43:22.109553 6071 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 16:43:22.109543 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1202 16:43:22.109565 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1202 16:43:22.109572 6071 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 16:43:22.109592 6071 factory.go:656] Stopping watch factory\\\\nI1202 16:43:22.109607 6071 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for 
network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.703782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.703832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.703847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.703866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.703878 4747 setters.go:603] 
"Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.715611 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.715715 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gclj\" (UniqueName: \"kubernetes.io/projected/fe274425-e804-4934-aa14-81ef24981fe9-kube-api-access-4gclj\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:26 crc kubenswrapper[4747]: E1202 16:43:26.716336 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:26 crc kubenswrapper[4747]: E1202 16:43:26.716436 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs podName:fe274425-e804-4934-aa14-81ef24981fe9 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:27.21641439 +0000 UTC m=+37.743303139 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs") pod "network-metrics-daemon-8brc6" (UID: "fe274425-e804-4934-aa14-81ef24981fe9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.720085 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.734595 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.741278 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gclj\" (UniqueName: \"kubernetes.io/projected/fe274425-e804-4934-aa14-81ef24981fe9-kube-api-access-4gclj\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.752677 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.767073 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:26Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.807862 4747 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.807933 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.807947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.807965 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.807977 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.911627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.911703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.911727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.911752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:26 crc kubenswrapper[4747]: I1202 16:43:26.911771 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:26Z","lastTransitionTime":"2025-12-02T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.015046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.015098 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.015110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.015131 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.015148 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.063197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" event={"ID":"eec908e5-57bd-451d-92da-0f805a5b7254","Type":"ContainerStarted","Data":"63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.065193 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/1.log" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.082280 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753f
c478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.096495 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.108435 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.117640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.117678 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.117687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.117703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.117717 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.122891 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.136587 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.149467 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.160594 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.180493 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.201993 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"message\\\":\\\"apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.108861 6071 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.109390 6071 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 16:43:22.109484 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 16:43:22.109511 6071 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 16:43:22.109514 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1202 16:43:22.109528 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1202 16:43:22.109530 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 16:43:22.109541 6071 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 16:43:22.109553 6071 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 16:43:22.109543 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1202 16:43:22.109565 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1202 16:43:22.109572 6071 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 16:43:22.109592 6071 factory.go:656] Stopping watch factory\\\\nI1202 16:43:22.109607 6071 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for 
network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.220236 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:27 crc kubenswrapper[4747]: E1202 16:43:27.220500 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:27 crc kubenswrapper[4747]: E1202 16:43:27.220611 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs podName:fe274425-e804-4934-aa14-81ef24981fe9 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:28.220585109 +0000 UTC m=+38.747474018 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs") pod "network-metrics-daemon-8brc6" (UID: "fe274425-e804-4934-aa14-81ef24981fe9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.220615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.220640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.220649 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.220666 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.220677 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.222478 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.237997 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.250151 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.261700 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.284439 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.301437 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.312543 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.323101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.323144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.323155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.323172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.323185 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.330001 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:27Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.425058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.425105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.425116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.425134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.425145 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.528228 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.528273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.528284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.528300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.528310 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.631106 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.631151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.631159 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.631184 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.631194 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.734049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.734104 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.734115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.734135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.734150 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.760400 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:27 crc kubenswrapper[4747]: E1202 16:43:27.760612 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.761236 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.761306 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:27 crc kubenswrapper[4747]: E1202 16:43:27.761430 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:27 crc kubenswrapper[4747]: E1202 16:43:27.761574 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.836495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.836553 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.836565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.836585 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.836598 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.940955 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.942054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.942078 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.942111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:27 crc kubenswrapper[4747]: I1202 16:43:27.942125 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:27Z","lastTransitionTime":"2025-12-02T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.045108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.045172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.045194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.045227 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.045246 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.148813 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.148856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.148866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.148885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.148895 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.230830 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:28 crc kubenswrapper[4747]: E1202 16:43:28.231112 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 16:43:28 crc kubenswrapper[4747]: E1202 16:43:28.231233 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs podName:fe274425-e804-4934-aa14-81ef24981fe9 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:30.231207581 +0000 UTC m=+40.758096400 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs") pod "network-metrics-daemon-8brc6" (UID: "fe274425-e804-4934-aa14-81ef24981fe9") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.251971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.252489 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.252674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.252782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.252892 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.356326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.356631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.356737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.356845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.356950 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.459897 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.459950 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.459963 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.459994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.460005 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.562308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.562362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.562375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.562420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.562439 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.665704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.665748 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.665761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.665777 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.665792 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.760228 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:28 crc kubenswrapper[4747]: E1202 16:43:28.761010 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.769413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.769462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.769473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.769502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.769515 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.872999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.873045 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.873083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.873107 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.873121 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.975381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.975424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.975434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.975454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:28 crc kubenswrapper[4747]: I1202 16:43:28.975465 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:28Z","lastTransitionTime":"2025-12-02T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.077661 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.077709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.077720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.077740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.077752 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.179836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.179876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.179886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.179922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.179933 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.283004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.283039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.283047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.283062 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.283072 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.385344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.385423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.385437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.385457 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.385469 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.492512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.492578 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.492594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.492616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.492634 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.595651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.595978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.596120 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.596266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.596379 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.699144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.699195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.699229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.699251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.699261 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.759512 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:29 crc kubenswrapper[4747]: E1202 16:43:29.759674 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.759877 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:29 crc kubenswrapper[4747]: E1202 16:43:29.760014 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.760746 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:29 crc kubenswrapper[4747]: E1202 16:43:29.760845 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.776925 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e889086
6539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.800767 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a
8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03aba9fbf2d85db7a7d7e7bf57a41165c5ee7fb8eee168ce3aeba669f9cf7857\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"message\\\":\\\"apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.108861 6071 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1202 16:43:22.109390 6071 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1202 16:43:22.109484 6071 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1202 16:43:22.109511 6071 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1202 16:43:22.109514 6071 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1202 16:43:22.109528 6071 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1202 16:43:22.109530 6071 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1202 16:43:22.109541 6071 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1202 16:43:22.109553 6071 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1202 16:43:22.109543 6071 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1202 16:43:22.109565 6071 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1202 16:43:22.109572 6071 handler.go:208] Removed *v1.Node event handler 2\\\\nI1202 16:43:22.109592 6071 factory.go:656] Stopping watch factory\\\\nI1202 16:43:22.109607 6071 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 
16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostI
P\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.801466 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.801496 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.801504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.801521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.801530 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.814105 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.826685 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.838991 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.852212 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.864990 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.878559 4747 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.897878 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.904209 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.904242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.904252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.904269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.904282 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:29Z","lastTransitionTime":"2025-12-02T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.910674 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.923690 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.936276 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.953228 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.967767 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.985726 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:29 crc kubenswrapper[4747]: I1202 16:43:29.999386 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:29Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:30 crc 
kubenswrapper[4747]: I1202 16:43:30.007101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.007237 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.007371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.007450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.007510 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.018432 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be 
initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:30Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.110494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 
16:43:30.110565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.110578 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.110596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.110607 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.213423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.213475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.213485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.213504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.213513 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.252412 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.252771 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.252861 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs podName:fe274425-e804-4934-aa14-81ef24981fe9 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:34.252834734 +0000 UTC m=+44.779723483 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs") pod "network-metrics-daemon-8brc6" (UID: "fe274425-e804-4934-aa14-81ef24981fe9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.316073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.316148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.316165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.316188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.316204 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.419387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.419458 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.419471 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.419492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.419503 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.521999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.522040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.522051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.522070 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.522105 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.624935 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.624990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.625003 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.625021 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.625032 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.728308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.728366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.728382 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.728406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.728419 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.759678 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.759881 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.779973 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.780021 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.780031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.780049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.780060 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.795329 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:30Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.799392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.799461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.799474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.799491 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.799504 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.816658 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:30Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.820842 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.820950 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.820977 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.821004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.821022 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.841142 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:30Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.844889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.844959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.844971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.844991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.845005 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.859137 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:30Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.863632 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.863690 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.863702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.863723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.863735 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.878676 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:30Z is after 
2025-08-24T17:21:41Z" Dec 02 16:43:30 crc kubenswrapper[4747]: E1202 16:43:30.878817 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.881268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.881304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.881318 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.881340 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.881354 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.985199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.985269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.985288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.985313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:30 crc kubenswrapper[4747]: I1202 16:43:30.985335 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:30Z","lastTransitionTime":"2025-12-02T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.002479 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.004377 4747 scope.go:117] "RemoveContainer" containerID="8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4" Dec 02 16:43:31 crc kubenswrapper[4747]: E1202 16:43:31.004769 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.021652 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.035013 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.052064 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.075038 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.087982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.088024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.088037 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.088052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.088063 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.091738 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.108109 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.122850 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.140524 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.156236 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.182228 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a
8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.190870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.190934 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.190945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.190973 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.190989 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.198437 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.209070 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.224254 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.237245 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 
16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.250607 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.262521 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.274866 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:31Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.295655 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.295738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.295756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.295780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.295795 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.399372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.399440 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.399452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.399472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.399486 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.501794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.501840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.501851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.501872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.501885 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.605572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.605647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.605668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.605699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.605719 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.708610 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.708657 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.708668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.708684 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.708700 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.759893 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.759939 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:31 crc kubenswrapper[4747]: E1202 16:43:31.760091 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:31 crc kubenswrapper[4747]: E1202 16:43:31.760135 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.760457 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:31 crc kubenswrapper[4747]: E1202 16:43:31.760548 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.811377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.811443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.811459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.811480 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.811494 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.914821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.914886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.914961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.914991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:31 crc kubenswrapper[4747]: I1202 16:43:31.915008 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:31Z","lastTransitionTime":"2025-12-02T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.017109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.017147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.017157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.017174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.017184 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.119807 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.120175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.120272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.120362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.120439 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.222612 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.222672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.222684 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.222704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.222721 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.325291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.325335 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.325351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.325373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.325390 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.428429 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.428479 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.428491 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.428508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.428520 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.530966 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.531009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.531019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.531039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.531057 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.634295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.634359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.634368 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.634384 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.634394 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.737426 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.737472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.737481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.737504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.737514 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.759821 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:32 crc kubenswrapper[4747]: E1202 16:43:32.760019 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.839970 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.840013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.840024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.840041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.840052 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.942930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.942971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.942982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.942999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:32 crc kubenswrapper[4747]: I1202 16:43:32.943011 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:32Z","lastTransitionTime":"2025-12-02T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.046004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.046077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.046094 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.046538 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.046594 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.149777 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.149813 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.149829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.149845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.149855 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.253057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.253109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.253123 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.253149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.253166 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.355250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.355584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.355673 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.355764 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.355852 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.459019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.459273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.459409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.459500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.459577 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.562364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.562408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.562418 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.562433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.562443 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.665968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.666045 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.666058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.666078 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.666092 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.760005 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.760194 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:33 crc kubenswrapper[4747]: E1202 16:43:33.760193 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.760264 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:33 crc kubenswrapper[4747]: E1202 16:43:33.760328 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:33 crc kubenswrapper[4747]: E1202 16:43:33.761032 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.769121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.769201 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.769218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.769264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.769276 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.871767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.872596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.872667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.872729 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.872794 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.975565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.975608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.975616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.975635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:33 crc kubenswrapper[4747]: I1202 16:43:33.975644 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:33Z","lastTransitionTime":"2025-12-02T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.078520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.078566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.078578 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.078596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.078608 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.181141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.181605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.181807 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.182018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.182173 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.284666 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.285028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.285123 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.285247 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.285350 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.298309 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:34 crc kubenswrapper[4747]: E1202 16:43:34.298545 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:34 crc kubenswrapper[4747]: E1202 16:43:34.298639 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs podName:fe274425-e804-4934-aa14-81ef24981fe9 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:42.298613623 +0000 UTC m=+52.825502392 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs") pod "network-metrics-daemon-8brc6" (UID: "fe274425-e804-4934-aa14-81ef24981fe9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.387771 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.387845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.387865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.387887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.387899 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.490654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.491122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.491176 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.491208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.491224 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.595883 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.595961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.595974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.595999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.596012 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.700062 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.700112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.700120 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.700136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.700146 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.760365 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:34 crc kubenswrapper[4747]: E1202 16:43:34.760520 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.802512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.802573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.802592 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.802616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.802633 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.905230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.905288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.905304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.905332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:34 crc kubenswrapper[4747]: I1202 16:43:34.905358 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:34Z","lastTransitionTime":"2025-12-02T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.007870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.007951 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.007965 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.007984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.007997 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.110721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.110763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.110775 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.110788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.110798 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.213524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.213587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.213597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.213614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.213629 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.316179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.316225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.316233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.316257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.316268 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.419441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.419504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.419518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.419537 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.419549 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.522646 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.522689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.522703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.522723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.522737 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.569968 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.579019 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.589587 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.602828 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.614826 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.625396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.625715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.625799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.625892 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.626004 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.626113 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.637438 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.649329 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager
-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.667595 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9
eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.678749 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.691579 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.701644 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.719887 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.728794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.728843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.728854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.728871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.728883 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.743976 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.755668 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.760172 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.760235 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:35 crc kubenswrapper[4747]: E1202 16:43:35.760307 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.760193 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:35 crc kubenswrapper[4747]: E1202 16:43:35.760436 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:35 crc kubenswrapper[4747]: E1202 16:43:35.760534 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.767874 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.778047 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.793175 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.810337 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:35Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.831512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.831553 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.831562 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.831577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.831589 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.934677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.935636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.935786 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.935945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:35 crc kubenswrapper[4747]: I1202 16:43:35.936082 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:35Z","lastTransitionTime":"2025-12-02T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.038552 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.038899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.039027 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.039135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.039237 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.142390 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.142449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.142463 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.142489 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.142504 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.245834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.245881 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.245892 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.245927 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.245941 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.348473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.348516 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.348527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.348543 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.348552 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.451063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.451112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.451126 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.451144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.451156 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.553480 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.553522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.553533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.553551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.553562 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.656585 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.656630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.656640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.656655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.656665 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.759605 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:36 crc kubenswrapper[4747]: E1202 16:43:36.759852 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.759945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.759993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.760013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.760037 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.760057 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.864439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.864486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.864497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.864513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.864524 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.968213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.968295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.968359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.968388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:36 crc kubenswrapper[4747]: I1202 16:43:36.968412 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:36Z","lastTransitionTime":"2025-12-02T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.071635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.071695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.071713 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.071738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.071761 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.175159 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.175249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.175284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.175324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.175348 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.278030 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.278081 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.278092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.278112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.278138 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.381297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.381344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.381353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.381372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.381383 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.483593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.483662 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.483673 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.483693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.483705 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.587535 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.587591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.587601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.587619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.587628 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.689826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.689867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.689875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.689888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.689897 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.759687 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:37 crc kubenswrapper[4747]: E1202 16:43:37.760005 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.760135 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:37 crc kubenswrapper[4747]: E1202 16:43:37.760220 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.760393 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:37 crc kubenswrapper[4747]: E1202 16:43:37.760661 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.793353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.793411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.793432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.793462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.793483 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.896508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.896558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.896567 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.896581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:37 crc kubenswrapper[4747]: I1202 16:43:37.896592 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:37Z","lastTransitionTime":"2025-12-02T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.000206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.000271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.000293 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.000324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.000350 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.103333 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.103371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.103380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.103394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.103403 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.206606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.206645 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.206653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.206669 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.206679 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.309085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.309134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.309144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.309163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.309176 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.412443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.412485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.412507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.412530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.412544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.516086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.516144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.516156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.516175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.516190 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.619253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.619326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.619349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.619384 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.619407 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.721819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.721892 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.721932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.721954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.721963 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.759810 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:38 crc kubenswrapper[4747]: E1202 16:43:38.760017 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.824984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.825064 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.825096 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.825116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.825128 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.928187 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.928242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.928258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.928280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:38 crc kubenswrapper[4747]: I1202 16:43:38.928295 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:38Z","lastTransitionTime":"2025-12-02T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.031753 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.031812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.031830 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.031856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.031877 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.135058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.135105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.135116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.135134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.135151 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.238340 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.238389 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.238399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.238415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.238430 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.341619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.341670 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.341679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.341696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.341705 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.444779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.444842 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.444859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.444886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.444949 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.548932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.548989 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.549008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.549045 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.549068 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.652663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.652705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.652719 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.652737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.652752 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.661342 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661517 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:11.661494406 +0000 UTC m=+82.188383165 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.661558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.661595 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.661626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.661655 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661712 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661741 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661760 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661766 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661793 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661809 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661817 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661820 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.661828 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:44:11.661808235 +0000 UTC m=+82.188697024 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.662094 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 16:44:11.66202126 +0000 UTC m=+82.188910009 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.662146 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:44:11.662132024 +0000 UTC m=+82.189021013 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.662183 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:44:11.662168005 +0000 UTC m=+82.189056994 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.756145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.756214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.756239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.756271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.756298 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.759865 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.759865 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.760264 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.760313 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.760476 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:39 crc kubenswrapper[4747]: E1202 16:43:39.760626 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.786129 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.804742 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.832980 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.853237 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.860271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.860396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.860415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.860439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.860480 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.867373 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.883248 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.898997 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.915894 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.931404 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.946376 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.963034 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.963082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.963098 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.963118 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.963134 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:39Z","lastTransitionTime":"2025-12-02T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.966712 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.982382 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:39 crc kubenswrapper[4747]: I1202 16:43:39.998720 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:39Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.022603 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:40Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.040462 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:40Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.061790 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:40Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.065577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.065617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.065630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.065647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.065659 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.080116 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:40Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.094576 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:
25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:40Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.176679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.176717 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.176725 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.176740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.176750 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.278708 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.278754 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.278762 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.278779 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.278792 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.382013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.382069 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.382082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.382099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.382111 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.485986 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.486103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.486129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.486161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.486184 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.588791 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.588852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.588864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.588885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.588896 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.693115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.693184 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.693202 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.693226 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.693249 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.759986 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:40 crc kubenswrapper[4747]: E1202 16:43:40.760156 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.795824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.795874 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.795889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.795926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.795937 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.899254 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.899326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.899346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.899373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:40 crc kubenswrapper[4747]: I1202 16:43:40.899390 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:40Z","lastTransitionTime":"2025-12-02T16:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.003640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.003693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.003731 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.003752 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.003763 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.069757 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.069801 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.069809 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.069826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.069835 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.085683 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:41Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.089465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.089499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.089510 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.089528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.089540 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.099429 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:41Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.102863 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.103040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.103136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.103224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.103313 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.114526 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:41Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.117452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.117478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.117487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.117509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.117519 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.127712 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:41Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.130720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.130766 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
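The "Error updating node status, will retry" entries all fail for the same root cause spelled out in the x509 error: the serving certificate of the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-02. A minimal Go sketch of the validity check the TLS handshake performs is below; the certificate file path is a hypothetical placeholder, not the webhook's actual location.

```go
// certcheck.go - report whether a PEM-encoded certificate is valid right now.
// Minimal sketch; "webhook-cert.pem" is a placeholder path, not where the
// network-node-identity webhook actually keeps its serving certificate.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("webhook-cert.pem") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil || block.Type != "CERTIFICATE" {
		log.Fatal("no CERTIFICATE block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now()
	fmt.Printf("NotBefore: %s\nNotAfter:  %s\n", cert.NotBefore, cert.NotAfter)
	switch {
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid") // the other half of the x509 message
	case now.After(cert.NotAfter):
		fmt.Println("certificate has expired") // the case recorded in this log
	default:
		fmt.Println("certificate is currently valid")
	}
}
```

On a live node the same dates can be read straight off the listener, e.g. `openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null | openssl x509 -noout -dates`.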
event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.130775 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.130795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.130806 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.142333 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:41Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.142443 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.143766 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
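After a fixed number of failed attempts within one sync, the kubelet stops retrying, which is what the "update node status exceeds retry count" entry records. A sketch of that bounded-retry pattern follows; the retry budget of 5 matches the kubelet's historical nodeStatusUpdateRetry constant but is an assumption here, and the function names are illustrative, not the kubelet's.

```go
package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the kubelet's fixed per-sync retry budget
// (assumed 5, the historical kubelet value).
const nodeStatusUpdateRetry = 5

// tryUpdateNodeStatus stands in for the PATCH against the API server, which
// in this log always dies at the expired admission-webhook certificate.
func tryUpdateNodeStatus() error {
	return errors.New("failed calling webhook: x509: certificate has expired")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}
```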
event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.143805 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.143820 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.143835 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.143847 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.247477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.247542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.247563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.247593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.247618 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.351050 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.351117 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.351144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.351179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.351204 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:41Z","lastTransitionTime":"2025-12-02T16:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.760186 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.760423 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.760217 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:41 crc kubenswrapper[4747]: I1202 16:43:41.760488 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.760610 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:41 crc kubenswrapper[4747]: E1202 16:43:41.760758 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
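The "No sandbox for pod can be found. Need to start a new one" / "Error syncing pod, skipping" pairs show the sync loop declining to create new pod sandboxes while the runtime network is not ready; host-network pods (such as ovnkube-node itself) are exempt from the gate, which is how the network plugin can still come up. A simplified sketch of that guard, with illustrative names rather than the kubelet's actual signatures:

```go
package main

import (
	"errors"
	"fmt"
)

// Pod is a tiny stand-in for the kubelet's pod object.
type Pod struct {
	Name        string
	HostNetwork bool
}

var errNetworkNotReady = errors.New(
	"network is not ready: container runtime network not ready: NetworkReady=false")

// syncPod mirrors the gate seen in the log: pods that would need a new
// sandbox are skipped until the runtime reports its network as ready.
func syncPod(pod Pod, runtimeNetworkReady bool) error {
	if !runtimeNetworkReady && !pod.HostNetwork {
		return errNetworkNotReady
	}
	fmt.Println("creating sandbox for", pod.Name)
	return nil
}

func main() {
	pods := []Pod{
		{Name: "network-check-target-xd92c"},
		{Name: "ovnkube-node-zmcxm", HostNetwork: true}, // host-network: still runs
	}
	for _, p := range pods {
		if err := syncPod(p, false); err != nil {
			fmt.Printf("Error syncing pod %q, skipping: %v\n", p.Name, err)
		}
	}
}
```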
Dec 02 16:43:42 crc kubenswrapper[4747]: I1202 16:43:42.300188 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:42 crc kubenswrapper[4747]: E1202 16:43:42.300354 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:42 crc kubenswrapper[4747]: E1202 16:43:42.300414 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs podName:fe274425-e804-4934-aa14-81ef24981fe9 nodeName:}" failed. No retries permitted until 2025-12-02 16:43:58.300398476 +0000 UTC m=+68.827287225 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs") pod "network-metrics-daemon-8brc6" (UID: "fe274425-e804-4934-aa14-81ef24981fe9") : object "openshift-multus"/"metrics-daemon-secret" not registered
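The durationBeforeRetry 16s on the failed metrics-certs mount reflects the volume manager's exponential backoff: each consecutive failure of the same operation roughly doubles the wait before the next attempt is permitted. The initial delay and cap below are illustrative assumptions (the kubelet's actual constants are on the order of 500ms and two minutes), not values read from this node.

```go
package main

import (
	"fmt"
	"time"
)

// backoff doubles the delay after every consecutive failure, up to a limit,
// mimicking nestedpendingoperations' durationBeforeRetry behaviour.
type backoff struct {
	initial, limit, current time.Duration
}

func (b *backoff) next() time.Duration {
	if b.current == 0 {
		b.current = b.initial
	} else {
		b.current *= 2
		if b.current > b.limit {
			b.current = b.limit
		}
	}
	return b.current
}

func main() {
	b := backoff{initial: 500 * time.Millisecond, limit: 2 * time.Minute}
	for i := 1; i <= 6; i++ {
		fmt.Printf("failure %d: retry in %s\n", i, b.next())
	}
	// failure 6 prints "retry in 16s", matching the log entry above.
}
```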
Dec 02 16:43:42 crc kubenswrapper[4747]: I1202 16:43:42.759657 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:42 crc kubenswrapper[4747]: E1202 16:43:42.760393 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:42 crc kubenswrapper[4747]: I1202 16:43:42.760967 4747 scope.go:117] "RemoveContainer" containerID="8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4"
Dec 02 16:43:43 crc kubenswrapper[4747]: I1202 16:43:43.760282 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:43 crc kubenswrapper[4747]: I1202 16:43:43.760344 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:43 crc kubenswrapper[4747]: I1202 16:43:43.760370 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:43 crc kubenswrapper[4747]: E1202 16:43:43.760453 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:43 crc kubenswrapper[4747]: E1202 16:43:43.760512 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:43 crc kubenswrapper[4747]: E1202 16:43:43.760598 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Has your network provider started?"} Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.038148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.038198 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.038213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.038235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.038252 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.125705 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/1.log" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.130136 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.130820 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.140954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.141000 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.141019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.141039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.141051 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.150868 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.167144 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.190833 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.205181 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.217421 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.233773 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.244018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.244067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.244083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.244102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.244115 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.250428 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.268516 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.282054 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.295236 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc 
kubenswrapper[4747]: I1202 16:43:44.311141 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.322006 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.344054 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.346087 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.346147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:44 crc
kubenswrapper[4747]: I1202 16:43:44.346162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.346188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.346202 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.372876 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7
d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.389724 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.403176 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.419521 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.435246 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:44Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.449898 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.449970 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.449983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.450001 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.450012 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.553509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.553556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.553565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.553581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.553594 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.656160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.656281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.656304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.656324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.656337 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.759215 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.759255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.759264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.759278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.759287 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.759440 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:44 crc kubenswrapper[4747]: E1202 16:43:44.759627 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.861706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.861738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.861747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.861762 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.861772 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.964692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.964759 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.964774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.964793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:44 crc kubenswrapper[4747]: I1202 16:43:44.964805 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:44Z","lastTransitionTime":"2025-12-02T16:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.067969 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.068024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.068035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.068053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.068066 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.137358 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/2.log"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.138675 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/1.log"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.142173 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" exitCode=1
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.142226 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"}
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.142278 4747 scope.go:117] "RemoveContainer" containerID="8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.143657 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"
Dec 02 16:43:45 crc kubenswrapper[4747]: E1202 16:43:45.144031 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.171092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.171157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.171175 4747 kubelet_node_status.go:724] "Recording event message for node"
node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.171202 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.171219 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.174305 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.187682 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.212063 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.227779 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.239568 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.255686 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.272499 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.273804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.273834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.273842 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.273856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.273866 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.287459 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.299615 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.312901 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc 
kubenswrapper[4747]: I1202 16:43:45.326769 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.336221 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.348978 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.366975 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f4bbdb953d655031c0f640305c17f21ee62479a8dd7903d01e6486076b12db4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"message\\\":\\\"ator/iptables-alerter-4ln5h in node crc\\\\nI1202 16:43:23.843885 6217 services_controller.go:451] Built service openshift-oauth-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-oauth-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-oauth-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.140\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1202 16:43:23.843927 6217 services_controller.go:452] Built service openshift-oauth-apiserver/api per-node LB for network=default: []services.LB{}\\\\nI1202 16:43:23.843932 6217 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1202 16:43:23.843937 6217 services_controller.go:453] Built service openshift-oauth-apiserver/api template LB for network=default: []services.LB{}\\\\nF1202 16:43:23.843992 6217 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:44Z\\\",\\\"message\\\":\\\"generated as: [{Op:mutate 
Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942546 6437 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 16:43:43.944311 6437 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-oauth-apiserver/api]} name:Service_openshift-oauth-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942423 6437 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 16:43:43.944424 6437 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.376213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.376274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.376290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.376309 4747 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.376321 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.380355 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.392888 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.405029 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.419062 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:45Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.478414 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.478453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.478462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.478477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.478488 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.582380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.582455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.582475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.582499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.582525 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.685315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.685371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.685381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.685400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.685411 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.760186 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:45 crc kubenswrapper[4747]: E1202 16:43:45.760369 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.760405 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:45 crc kubenswrapper[4747]: E1202 16:43:45.760538 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.760212 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:45 crc kubenswrapper[4747]: E1202 16:43:45.760612 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.788227 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.788283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.788292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.788309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.788321 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.890958 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.891007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.891018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.891036 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.891046 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.993656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.993702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.993715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.993731 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:45 crc kubenswrapper[4747]: I1202 16:43:45.993740 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:45Z","lastTransitionTime":"2025-12-02T16:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.096887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.096981 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.096997 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.097019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.097032 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.146775 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/2.log" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.149517 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:43:46 crc kubenswrapper[4747]: E1202 16:43:46.149670 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.160388 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.173131 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"na
me\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.183406 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.199019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.199063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.199073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.199088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.199099 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.200666 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.213869 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.226801 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.240161 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc 
kubenswrapper[4747]: I1202 16:43:46.253570 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.270320 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.285168 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.299628 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.301172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.301239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.301256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.301282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.301297 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.312018 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.327868 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.348953 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:44Z\\\",\\\"message\\\":\\\"generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942546 6437 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 16:43:43.944311 6437 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-oauth-apiserver/api]} name:Service_openshift-oauth-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942423 6437 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 16:43:43.944424 6437 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.364048 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.377242 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.391168 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.404042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.404095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.404107 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.404129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.404140 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.404604 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:46Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.506314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.506346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.506357 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.506373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.506385 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.609081 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.609121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.609130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.609148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.609159 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.711851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.711936 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.711956 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.711977 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.711990 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.759810 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:46 crc kubenswrapper[4747]: E1202 16:43:46.760015 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.814702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.814764 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.814781 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.814807 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.814824 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.918340 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.918394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.918407 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.918427 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:46 crc kubenswrapper[4747]: I1202 16:43:46.918446 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:46Z","lastTransitionTime":"2025-12-02T16:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.021208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.021258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.021269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.021288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.021303 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.124124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.124173 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.124183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.124200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.124217 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.231615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.231777 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.232507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.232546 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.232557 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.336631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.336711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.336734 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.336763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.336786 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.440876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.440999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.441016 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.441040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.441057 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.544193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.544247 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.544261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.544279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.544291 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.647619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.647658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.647667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.647701 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.647711 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.750663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.750703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.750714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.750729 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.750740 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.760080 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:47 crc kubenswrapper[4747]: E1202 16:43:47.760218 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.760447 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:47 crc kubenswrapper[4747]: E1202 16:43:47.760536 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.761139 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:47 crc kubenswrapper[4747]: E1202 16:43:47.761222 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.853821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.853886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.853936 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.853970 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.853987 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.956882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.956946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.956958 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.956974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:47 crc kubenswrapper[4747]: I1202 16:43:47.956984 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:47Z","lastTransitionTime":"2025-12-02T16:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.060674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.061067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.061201 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.061329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.061512 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.164859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.164916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.164925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.164941 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.164951 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.267778 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.267832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.267842 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.267858 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.267869 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.370040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.370086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.370095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.370110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.370120 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.472859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.472945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.472958 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.472980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.472995 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.576363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.576428 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.576443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.576465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.576479 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.679722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.679806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.679815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.679833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.679844 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.760410 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:48 crc kubenswrapper[4747]: E1202 16:43:48.760654 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.782498 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.782571 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.782595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.782626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.782650 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.884957 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.885014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.885032 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.885051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.885063 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.987584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.987634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.987643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.987657 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:48 crc kubenswrapper[4747]: I1202 16:43:48.987667 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:48Z","lastTransitionTime":"2025-12-02T16:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.090257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.090323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.090338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.090359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.090378 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.193696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.193731 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.193739 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.193755 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.193766 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.297273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.297330 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.297338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.297357 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.297367 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.399685 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.399727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.399739 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.399757 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.399770 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.503028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.503101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.503118 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.503140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.503151 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.606296 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.606339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.606354 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.606375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.606391 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.710694 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.710736 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.710745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.710761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.710771 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.759802 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.759880 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:49 crc kubenswrapper[4747]: E1202 16:43:49.760083 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.760132 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:49 crc kubenswrapper[4747]: E1202 16:43:49.760267 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:43:49 crc kubenswrapper[4747]: E1202 16:43:49.760482 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.775367 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.788851 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.805971 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.815606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.815850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.815986 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.816082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.816164 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.821467 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.839232 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.859564 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.876899 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.889818 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.904381 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.919301 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.919803 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.919931 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.919998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.920080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.920150 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:49Z","lastTransitionTime":"2025-12-02T16:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.935286 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.948751 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.960853 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.976225 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:49 crc kubenswrapper[4747]: I1202 16:43:49.987106 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.001562 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:49Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.023384 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.023430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc 
kubenswrapper[4747]: I1202 16:43:50.023445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.023464 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.023476 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.025260 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7
d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:44Z\\\",\\\"message\\\":\\\"generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942546 6437 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 16:43:43.944311 6437 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-oauth-apiserver/api]} name:Service_openshift-oauth-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942423 6437 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 16:43:43.944424 6437 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:50Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.044734 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:50Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.127582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.127960 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.128171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.128386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.128565 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.231207 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.231283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.231295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.231313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.231325 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.333889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.333943 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.333951 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.333965 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.333974 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.436253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.436284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.436294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.436308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.436317 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.539323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.539394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.539435 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.539455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.539468 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.642642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.642674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.642682 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.642695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.642705 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.745243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.745271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.745281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.745295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.745303 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.759485 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:50 crc kubenswrapper[4747]: E1202 16:43:50.759635 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.847325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.847353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.847361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.847374 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.847383 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.950441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.950478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.950487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.950503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:50 crc kubenswrapper[4747]: I1202 16:43:50.950514 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:50Z","lastTransitionTime":"2025-12-02T16:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.053431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.054221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.054278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.054311 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.054326 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.157727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.157801 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.157816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.157840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.157859 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.260031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.260266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.260281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.260301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.260313 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.363411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.363465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.363478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.363503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.363517 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.416945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.417047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.417057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.417076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.417086 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.435192 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:51Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.439527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.439610 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.439626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.439648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.439663 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.453686 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:51Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.457836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.457884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.457899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.457942 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.457969 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.470982 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:51Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.476221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.476271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.476286 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.476305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.476317 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.489524 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:51Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.492998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.493049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.493063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.493088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.493100 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.505230 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:51Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.505345 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.510986 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.511031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.511041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.511055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.511065 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.613512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.613553 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.613562 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.613578 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.613588 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.715758 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.715819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.715836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.715852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.715862 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:51Z","lastTransitionTime":"2025-12-02T16:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.760397 4747 util.go:30] "No sandbox for pod can be found. 
Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.760577 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.760776 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.760860 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:43:51 crc kubenswrapper[4747]: I1202 16:43:51.761046 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:51 crc kubenswrapper[4747]: E1202 16:43:51.761107 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:52 crc kubenswrapper[4747]: I1202 16:43:52.759964 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:52 crc kubenswrapper[4747]: E1202 16:43:52.760149 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.759616 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.759689 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.759641 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:53 crc kubenswrapper[4747]: E1202 16:43:53.759800 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:43:53 crc kubenswrapper[4747]: E1202 16:43:53.759956 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:53 crc kubenswrapper[4747]: E1202 16:43:53.760221 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.771693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.771730 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.771741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.771754 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.771765 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:53Z","lastTransitionTime":"2025-12-02T16:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.874603 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.875034 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.875046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.875062 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:53 crc kubenswrapper[4747]: I1202 16:43:53.875074 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:53Z","lastTransitionTime":"2025-12-02T16:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.760054 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:54 crc kubenswrapper[4747]: E1202 16:43:54.760255 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.804305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.804369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.804378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.804394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.804403 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:54Z","lastTransitionTime":"2025-12-02T16:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.907583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.907638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.907647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.907660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:54 crc kubenswrapper[4747]: I1202 16:43:54.907671 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:54Z","lastTransitionTime":"2025-12-02T16:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.759874 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.759954 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.759896 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:55 crc kubenswrapper[4747]: E1202 16:43:55.760040 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:55 crc kubenswrapper[4747]: E1202 16:43:55.760114 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:55 crc kubenswrapper[4747]: E1202 16:43:55.760282 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.832006 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.832052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.832063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.832080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.832090 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:55Z","lastTransitionTime":"2025-12-02T16:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.935306 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.935355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.935366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.935385 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:55 crc kubenswrapper[4747]: I1202 16:43:55.935404 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:55Z","lastTransitionTime":"2025-12-02T16:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.655506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.655562 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.655583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.655603 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.655615 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:56Z","lastTransitionTime":"2025-12-02T16:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.758638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.758745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.758769 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.758802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.758825 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:56Z","lastTransitionTime":"2025-12-02T16:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.759656 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:56 crc kubenswrapper[4747]: E1202 16:43:56.759855 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.861472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.861515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.861526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.861540 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.861551 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:56Z","lastTransitionTime":"2025-12-02T16:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.964228 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.964290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.964312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.964343 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:56 crc kubenswrapper[4747]: I1202 16:43:56.964425 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:56Z","lastTransitionTime":"2025-12-02T16:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.067225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.067288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.067311 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.067344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.067368 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.170973 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.171039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.171050 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.171068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.171081 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.274294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.274380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.274398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.274425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.274444 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.378210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.378277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.378294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.378313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.378326 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.480765 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.480838 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.480853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.480877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.480891 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.584135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.584185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.584197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.584221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.584237 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.687704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.687761 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.687780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.687800 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.687812 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.760339 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.760472 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:43:57 crc kubenswrapper[4747]: E1202 16:43:57.760540 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:43:57 crc kubenswrapper[4747]: E1202 16:43:57.760681 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.760370 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:43:57 crc kubenswrapper[4747]: E1202 16:43:57.760804 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.791171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.791241 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.791255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.791285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.791301 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.894279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.894355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.894365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.894383 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.894394 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.997033 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.997085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.997094 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.997110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:57 crc kubenswrapper[4747]: I1202 16:43:57.997121 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:57Z","lastTransitionTime":"2025-12-02T16:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.099747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.099794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.099805 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.099822 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.099831 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.202611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.202669 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.202681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.202704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.202714 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.305782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.305842 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.305865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.305885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.305931 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.378598 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:43:58 crc kubenswrapper[4747]: E1202 16:43:58.378765 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:58 crc kubenswrapper[4747]: E1202 16:43:58.378829 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs podName:fe274425-e804-4934-aa14-81ef24981fe9 nodeName:}" failed. No retries permitted until 2025-12-02 16:44:30.378810016 +0000 UTC m=+100.905698765 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs") pod "network-metrics-daemon-8brc6" (UID: "fe274425-e804-4934-aa14-81ef24981fe9") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.409152 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.409213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.409233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.409253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.409264 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.511977 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.512031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.512041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.512060 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.512075 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
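The MountVolume failure above is not retried immediately: nestedpendingoperations backs off between attempts, and here the backoff has already grown to 32s, with the next attempt gated until 16:44:30 (m=+100.9 on the kubelet's monotonic clock). A small sketch for pulling these retry gates out of a saved log, again assuming a hypothetical local copy named kubelet.log:

    import re

    # Match the retry gate printed by nestedpendingoperations above, e.g.
    # "No retries permitted until 2025-12-02 16:44:30.378810016 +0000 UTC
    #  m=+100.905698765 (durationBeforeRetry 32s)".
    retry_re = re.compile(r'No retries permitted until (\S+ \S+) \+0000 UTC '
                          r'm=\+(\S+) \(durationBeforeRetry ([^)]+)\)')

    with open("kubelet.log", encoding="utf-8", errors="replace") as f:
        for line in f:
            m = retry_re.search(line)
            if m:
                deadline, monotonic, backoff = m.groups()
                print("next retry at", deadline, "UTC; backoff", backoff,
                      "(monotonic m=+%s)" % monotonic)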
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.614663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.614722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.614739 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.614756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.614767 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.717938 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.717983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.717993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.718010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.718022 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.760381 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6"
Dec 02 16:43:58 crc kubenswrapper[4747]: E1202 16:43:58.760541 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.761286 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"
Dec 02 16:43:58 crc kubenswrapper[4747]: E1202 16:43:58.761624 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.820429 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.820470 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.820480 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.820494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.820505 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.923129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.923166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.923175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.923190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:58 crc kubenswrapper[4747]: I1202 16:43:58.923199 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:58Z","lastTransitionTime":"2025-12-02T16:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.025700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.025742 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.025756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.025772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.025782 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.129257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.129319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.129336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.129362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.129383 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.192686 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsbf6_de9e6dbe-5eb1-40b7-8ddf-a8df9977153a/kube-multus/0.log"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.192735 4747 generic.go:334] "Generic (PLEG): container finished" podID="de9e6dbe-5eb1-40b7-8ddf-a8df9977153a" containerID="80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17" exitCode=1
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.192770 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsbf6" event={"ID":"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a","Type":"ContainerDied","Data":"80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17"}
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.193285 4747 scope.go:117] "RemoveContainer" containerID="80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.207291 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.226376 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.231344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.231393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.231410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.231434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.231452 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.244926 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.259626 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:58Z\\\",\\\"message\\\":\\\"2025-12-02T16:43:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c\\\\n2025-12-02T16:43:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c to /host/opt/cni/bin/\\\\n2025-12-02T16:43:13Z [verbose] multus-daemon started\\\\n2025-12-02T16:43:13Z [verbose] Readiness Indicator file check\\\\n2025-12-02T16:43:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.270869 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z"
Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.287477 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status:
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.305689 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.322965 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-co
py\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.333767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.333799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.333807 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.333821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.333830 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.347296 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7
d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:44Z\\\",\\\"message\\\":\\\"generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942546 6437 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 16:43:43.944311 6437 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-oauth-apiserver/api]} name:Service_openshift-oauth-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942423 6437 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 16:43:43.944424 6437 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.359604 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.372763 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.382807 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.394365 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.409615 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.421777 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.436120 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.436194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.436210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.436234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.436251 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.440854 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.455239 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.467289 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.538653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.538706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.538715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.538732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.538741 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.642053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.642100 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.642110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.642129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.642142 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.744990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.745050 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.745063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.745083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.745096 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.760193 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.760390 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:43:59 crc kubenswrapper[4747]: E1202 16:43:59.760557 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:43:59 crc kubenswrapper[4747]: E1202 16:43:59.760812 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.760989 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:43:59 crc kubenswrapper[4747]: E1202 16:43:59.761469 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.778155 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.792362 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.809270 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa3
3c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.833770 4747 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:17
4f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ov
n-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:44Z\\\",\\\"message\\\":\\\"generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942546 6437 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 16:43:43.944311 6437 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-oauth-apiserver/api]} name:Service_openshift-oauth-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942423 6437 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 16:43:43.944424 6437 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.846788 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.847115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.847143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.847154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.847174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.847191 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.859535 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.873384 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.888327 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.903848 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.917506 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.941051 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.950004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.950058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.950073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.950091 4747 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.950106 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:43:59Z","lastTransitionTime":"2025-12-02T16:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.956300 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.966945 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.981228 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:43:59 crc kubenswrapper[4747]: I1202 16:43:59.997150 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:43:59Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.011499 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.029489 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:58Z\\\",\\\"message\\\":\\\"2025-12-02T16:43:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c\\\\n2025-12-02T16:43:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c to /host/opt/cni/bin/\\\\n2025-12-02T16:43:13Z [verbose] multus-daemon started\\\\n2025-12-02T16:43:13Z [verbose] Readiness Indicator file check\\\\n2025-12-02T16:43:58Z [error] have you checked that 
your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.043274 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.053006 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.053059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.053076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.053096 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.053108 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.156359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.156413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.156425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.156445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.156459 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.197812 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsbf6_de9e6dbe-5eb1-40b7-8ddf-a8df9977153a/kube-multus/0.log" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.197882 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsbf6" event={"ID":"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a","Type":"ContainerStarted","Data":"4427788e64004a199259aa6db948ace49dff52f17eeb698baf3cecfc17154cb5"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.219369 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7
d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:44Z\\\",\\\"message\\\":\\\"generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942546 6437 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 16:43:43.944311 6437 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-oauth-apiserver/api]} name:Service_openshift-oauth-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942423 6437 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 16:43:43.944424 6437 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.233396 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.246025 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.258691 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.258741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.258753 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.258772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.258784 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.260032 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.272444 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.286100 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.299184 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.315094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.326423 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.336939 4747 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.350190 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.362306 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.362391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.362407 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.362433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.362458 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.362797 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.381135 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6
c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.394321 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.407120 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4427788e64004a199259aa6db948ace49dff52f17eeb698baf3cecfc17154cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:58Z\\\",\\\"message\\\":\\\"2025-12-02T16:43:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c\\\\n2025-12-02T16:43:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c to /host/opt/cni/bin/\\\\n2025-12-02T16:43:13Z [verbose] multus-daemon started\\\\n2025-12-02T16:43:13Z [verbose] Readiness Indicator file check\\\\n2025-12-02T16:43:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.419030 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.431497 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.445594 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:00Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.465835 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.465895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.465926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.465946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.465960 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.568051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.568163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.568178 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.568200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.568216 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.671031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.671076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.671085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.671101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.671111 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.759515 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:00 crc kubenswrapper[4747]: E1202 16:44:00.759704 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.774238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.774282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.774290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.774307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.774316 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.877383 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.877434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.877452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.877471 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.877482 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.979957 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.980014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.980031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.980054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:00 crc kubenswrapper[4747]: I1202 16:44:00.980071 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:00Z","lastTransitionTime":"2025-12-02T16:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.082400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.082454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.082472 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.082497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.082518 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.185699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.186092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.186103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.186121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.186130 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.289420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.289489 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.289513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.289542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.289564 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.392373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.392420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.392433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.392453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.392468 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.495478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.495512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.495520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.495535 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.495543 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.597747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.597834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.597848 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.597865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.597896 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.701150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.701188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.701198 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.701213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.701223 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.760690 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.760850 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.760728 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.760997 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.761327 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.761467 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.804161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.804224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.804236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.804263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.804281 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.840420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.840484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.840495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.840515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.840532 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.855732 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:01Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.860294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.860342 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.860360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.860381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.860395 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.873234 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:01Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.877555 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.877621 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.877634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.877657 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.877668 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.892669 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:01Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.897205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.897266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.897280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.897304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.897319 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.912813 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:01Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.917364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.917412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.917429 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.917453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.917466 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.932964 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a121f387-7df9-4ca6-bc20-7c686c9d2626\\\",\\\"systemUUID\\\":\\\"e9e860e5-c32f-4d91-a884-f294326f5bb2\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:01Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:01 crc kubenswrapper[4747]: E1202 16:44:01.933145 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.935179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.935212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.935224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.935244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:01 crc kubenswrapper[4747]: I1202 16:44:01.935258 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:01Z","lastTransitionTime":"2025-12-02T16:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.037878 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.037942 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.037953 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.037974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.037986 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.141257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.141317 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.141333 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.141355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.141371 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.244601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.244654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.244670 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.244694 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.244713 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.347112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.347168 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.347184 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.347209 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.347221 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.450743 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.450808 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.450821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.450846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.450860 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.553486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.553528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.553540 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.553558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.553570 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.655960 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.656007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.656016 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.656032 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.656042 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.759446 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:02 crc kubenswrapper[4747]: E1202 16:44:02.759593 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.760100 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.760147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.760155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.760166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.760174 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.863671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.863706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.863715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.863729 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.863739 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.966434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.966484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.966497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.966516 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:02 crc kubenswrapper[4747]: I1202 16:44:02.966529 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:02Z","lastTransitionTime":"2025-12-02T16:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.069461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.069526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.069535 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.069558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.069568 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.173502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.173574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.173591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.173622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.173651 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.277224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.277300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.277317 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.277345 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.277365 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.380991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.381087 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.381114 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.381139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.381154 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.483410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.483447 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.483458 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.483474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.483486 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.586122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.586185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.586196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.586215 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.586229 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.688866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.688937 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.688991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.689014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.689028 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.760228 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.760300 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.760335 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:03 crc kubenswrapper[4747]: E1202 16:44:03.760439 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:44:03 crc kubenswrapper[4747]: E1202 16:44:03.760588 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:44:03 crc kubenswrapper[4747]: E1202 16:44:03.760682 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.792265 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.792347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.792371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.792402 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.792424 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.896710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.896774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.896788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.896811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:03 crc kubenswrapper[4747]: I1202 16:44:03.896826 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:03Z","lastTransitionTime":"2025-12-02T16:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:03.999943 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:03.999997 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.000008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.000028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.000053 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.106133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.106198 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.106216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.106241 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.106259 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.209979 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.210023 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.210035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.210052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.210064 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.313633 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.313701 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.313715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.313738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.313752 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.416091 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.416141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.416150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.416165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.416176 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.518689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.518747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.518759 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.518778 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.518790 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.622082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.622171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.622196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.622226 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.622247 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.726028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.726119 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.726146 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.726177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.726199 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.760107 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:04 crc kubenswrapper[4747]: E1202 16:44:04.760282 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.828378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.828453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.828485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.828518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.828541 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.932375 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.932448 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.932473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.932499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:04 crc kubenswrapper[4747]: I1202 16:44:04.932519 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:04Z","lastTransitionTime":"2025-12-02T16:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.035522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.035586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.035602 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.035627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.035648 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.138360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.138418 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.138434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.138517 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.138539 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.241638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.241697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.241706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.241740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.241752 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.344346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.344393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.344405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.344422 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.344435 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.447580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.447636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.447646 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.447663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.447673 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.550476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.550538 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.550552 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.550573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.550620 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.654073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.654122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.654132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.654150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.654162 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.757371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.757430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.757441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.757463 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.757476 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.759767 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.759831 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.759942 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:05 crc kubenswrapper[4747]: E1202 16:44:05.759943 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:44:05 crc kubenswrapper[4747]: E1202 16:44:05.760125 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:44:05 crc kubenswrapper[4747]: E1202 16:44:05.760195 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.774893 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.860412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.860476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.860488 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.860508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.860524 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.963058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.963110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.963119 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.963135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:05 crc kubenswrapper[4747]: I1202 16:44:05.963145 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:05Z","lastTransitionTime":"2025-12-02T16:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.065750 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.065888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.065900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.065933 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.065946 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.168723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.168793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.168803 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.168823 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.168836 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.271722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.271781 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.271795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.271816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.271830 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.374674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.374740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.374751 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.374769 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.374781 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.477877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.477932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.477946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.477964 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.477977 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.580322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.580383 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.580396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.580416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.580431 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.683061 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.683138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.683151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.683172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.683189 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.759552 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:06 crc kubenswrapper[4747]: E1202 16:44:06.759761 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.786009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.786063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.786075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.786094 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.786110 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.888864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.888934 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.888944 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.888959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.888969 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.991402 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.991442 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.991451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.991470 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:06 crc kubenswrapper[4747]: I1202 16:44:06.991479 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:06Z","lastTransitionTime":"2025-12-02T16:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.095031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.095100 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.095112 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.095135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.095152 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.197815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.197865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.197876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.197893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.197922 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.301255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.301316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.301326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.301346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.301396 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.404036 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.404103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.404113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.404127 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.404137 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.507763 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.507820 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.507857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.507878 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.507891 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.611005 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.611131 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.611157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.611194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.611216 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.715011 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.715072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.715083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.715105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.715118 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.760318 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.760320 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.760352 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:07 crc kubenswrapper[4747]: E1202 16:44:07.760509 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:44:07 crc kubenswrapper[4747]: E1202 16:44:07.760881 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:44:07 crc kubenswrapper[4747]: E1202 16:44:07.760799 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.818555 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.818617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.818629 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.818653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.818668 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.921864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.921966 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.921978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.922000 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:07 crc kubenswrapper[4747]: I1202 16:44:07.922017 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:07Z","lastTransitionTime":"2025-12-02T16:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.024892 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.024978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.024998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.025021 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.025036 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.128446 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.128500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.128511 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.128530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.128542 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.231148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.231213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.231225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.231245 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.231258 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.335013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.335074 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.335089 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.335114 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.335133 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.438394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.438683 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.438703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.438732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.438753 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.542007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.542090 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.542107 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.542133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.542151 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.644988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.645044 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.645061 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.645084 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.645098 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.748532 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.748596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.748614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.748635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.748649 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.760341 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:08 crc kubenswrapper[4747]: E1202 16:44:08.760518 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.852537 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.852606 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.852616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.852637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.852649 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.955068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.955116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.955128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.955156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:08 crc kubenswrapper[4747]: I1202 16:44:08.955167 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:08Z","lastTransitionTime":"2025-12-02T16:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.057274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.057323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.057335 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.057355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.057369 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.159799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.159870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.159886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.159942 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.159967 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.265188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.265268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.265280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.265295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.265308 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.368346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.368408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.368425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.368449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.368467 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.471217 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.471272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.471286 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.471308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.471320 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.574370 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.574421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.574431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.574446 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.574454 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.677569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.677614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.677626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.677644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.677656 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.759868 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.759888 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:09 crc kubenswrapper[4747]: E1202 16:44:09.760178 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.759939 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:09 crc kubenswrapper[4747]: E1202 16:44:09.760285 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:44:09 crc kubenswrapper[4747]: E1202 16:44:09.760365 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.773886 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d87b39c0-22b2-4b6e-8270-9773480cbaf9\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b6412bd9889bd4052094a6a615f95516b2ad40be5b683c168627f98c9c180a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b3e5f53870895e7908e862ed8bd59c4019ae3a0c8057418b3295104b982e62c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b3e5f53870895e7908e862ed8bd59c4019ae3a0c8057418b3295104b982e62c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 
2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.780430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.780493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.780505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.780522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.780534 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.789543 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22ea6643e0c24fa8fbfd7047a4280da0767ee11baca5ac5bde492ceb06a8d9cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 
crc kubenswrapper[4747]: I1202 16:44:09.809726 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.832091 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-tsbf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:44:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4427788e64004a199259aa6db948ace49dff52f17eeb698baf3cecfc17154cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:58Z\\\",\\\"message\\\":\\\"2025-12-02T16:43:13+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c\\\\n2025-12-02T16:43:13+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2a868c8a-4d0d-4ba9-805d-21e5df9c997c to /host/opt/cni/bin/\\\\n2025-12-02T16:43:13Z [verbose] multus-daemon started\\\\n2025-12-02T16:43:13Z [verbose] Readiness Indicator file check\\\\n2025-12-02T16:43:58Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nr2f9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-multus\"/\"multus-tsbf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.850300 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-8brc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fe274425-e804-4934-aa14-81ef24981fe9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gclj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:26Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-8brc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.867750 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"44dd404f-95d0-4464-ac0f-ead0da9a909c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"ion-apiserver-authentication::requestheader-client-ca-file\\\\nI1202 16:43:07.426002 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1202 16:43:07.426021 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI1202 16:43:07.426044 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\"\\\\nI1202 16:43:07.426085 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-1538725758/tls.crt::/tmp/serving-cert-1538725758/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1764693771\\\\\\\\\\\\\\\" (2025-12-02 16:42:51 +0000 UTC to 2026-01-01 16:42:52 +0000 UTC (now=2025-12-02 16:43:07.426054618 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426226 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764693787\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764693787\\\\\\\\\\\\\\\" (2025-12-02 15:43:07 +0000 UTC to 2026-12-02 15:43:07 +0000 UTC (now=2025-12-02 16:43:07.426209342 +0000 UTC))\\\\\\\"\\\\nI1202 16:43:07.426252 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1202 16:43:07.426277 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1202 16:43:07.426290 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1202 16:43:07.429631 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.429739 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nI1202 16:43:07.430248 1 reflector.go:368] Caches populated for *v1.ConfigMap from k8s.io/client-go@v0.31.1/tools/cache/reflector.go:243\\\\nF1202 16:43:07.431800 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.880667 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-scr52" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"53ad3580-be8b-44c9-b657-3fa2cfd54956\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac07245cd264f814341290a93e02ba66f54a28fc81abee8538574311195f37a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lzk2t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-scr52\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.884052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.884106 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.884117 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.884135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.884146 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.897951 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc9a3ea0-15a0-4198-808a-b3bd9a9f4527\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bfd053645ad19ba9b62636d06d82396a4a6d41385feee7a61afdfc5e577ba05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2be32689f22a93ced88983801d33994ee85747443d5a6e5e72a215e612adbba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44fdff5b
18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44fdff5b18ecf1ed5ed8f5f0ca4726cd33d1b216d957becaa33c0853d7cd7b10\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79ef365484fcf989f2c1f13585eb5422e01c827ba43f71852b210b68dbe6a952\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://544d5e257fdc46274a2b011c41b897df8a33e173310114b871534e428f7ede5f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d771cff8c473e20a54e65898b2ca7ff46e138b29274a99361aa35bf0557d6de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2c1b742dd58d739c27af3728d64c5e8890866539a683c36774a183d02eb42d75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-msnk2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q2z9c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.916492 4747 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b62a2b51-3b8a-4786-97ee-01d2c6332c83\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32
fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-02T16:43:44Z\\\",\\\"message\\\":\\\"generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942546 6437 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1202 16:43:43.944311 6437 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-oauth-apiserver/api]} name:Service_openshift-oauth-apiserver/api_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.140:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {fe46cb89-4e54-4175-a112-1c5224cd299e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1202 16:43:43.942423 6437 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1202 16:43:43.944424 6437 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zmcxm_openshift-ovn-kubernetes(b62a2b51-3b8a-4786-97ee-01d2c6332c83)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xd24c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:13Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zmcxm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.930343 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.946954 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.961229 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"444e1e59-7bc7-44cd-bb37-ed903442b724\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f57813c096f5833d406bea7ae1adc1371b232018c93380e953d2d13eeddf4786\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xn7k7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:12Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-m5zcc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.977694 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eec908e5-57bd-451d-92da-0f805a5b7254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bb377c7c7f2aa97807da6355018d3cb4cdf4bb3a35435e9f095d1718ad081d45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ab2655070cc0e23bd5b63b824d193119179b2413ffa8978b4e98b9fbf59bf6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0
b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xkd2m\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:25Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-98rgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.986737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.986799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.986811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.986829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.986840 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:09Z","lastTransitionTime":"2025-12-02T16:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:09 crc kubenswrapper[4747]: I1202 16:44:09.995115 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f85762fb1a88db332c486cd73c786241898c9e45b2e45f92412eec7af6e94b3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1ccf3a70922626f5b2e8ef83f2a1b9218d6c6e2aece70cf721f76f0ee2c263\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:09Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.010514 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4844a0-1c52-4172-a301-d45444e393ee\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b04826df65b6eeb39190ed96d8a9b19e6c6d512c9959e7fcba556f81cdb6ae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cee76ff07172c41ea65c21e54f8951d0223d6576e71d2e4ccf80001b5e3b9190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcb72d2b6b402f07ecf53060ed7662c37883ec1e95b3b2bd130097e4190a6f16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2eef2db21fbcbebcab498526a370e77ecb419e90622bbf19ddc0f20faa43e385\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.036166 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d418504b-d6ea-43b2-a8b1-c25c7aeef547\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5fb9ffd48d67d9203d6758fed41b4f3a7e2f604bdf776fb3377b4482090b602\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea6c70477e3d3b69dda283c379a8e88e5b69e36656cb9f9d1f3755cf1a9058bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2e4799ebdce40b3b3cc66617c254e62070ca03e970ba46a043b493207098672\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b06eef34688f958628952678a79aa8c8b64ffa9eca5bde558156a2161b9a7d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4db1693f1b14cb8d7e7248c30c45b6200fcf0373ca1bd4cb86c2b8e180302016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01e6562205b4e1923e5021a87b43bcbc9c186c305cb168777a7b602035d5dfc2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c1fa986f33322c4a0323f5a347e2425726f22e5b684040efb1b76ad75f1aa9b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e3b2c6cd14f19aca054d6eacd7b72c28f96209f1a18bc135d60425b5cbb94a2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.051763 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:11Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a5920647c6bfef1046d9dc02e117aad771501ce543db36d2cc3d7ce2d3b425b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.065353 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-c45mn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e33e4be-7329-468e-aee4-dc4b5fe081ba\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61b5fe060145533651199d0af625e7f3823bdd4c7173db42061b88f047beb49c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:43:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6m59s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:43:16Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-c45mn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.079317 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9cd18282-c63b-4460-941a-710c754d6a84\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:43:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T16:42:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5103a470cc7bbccffc7f5d15cf88fab32961ff5321d2517f547bdccc4f306aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c6725e4f0e2cdf80a1d2de360f65a492290aa2a3e8491bf0ea4aa5b144e597d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://08de992a97c5e744f5379e6d07dc0b768f172525a955cbbfe46b3cc6f8d2831f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T16:42:49Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T16:44:10Z is after 2025-08-24T17:21:41Z" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.089363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.089412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.089424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.089444 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.089456 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.192708 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.193461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.193585 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.193827 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.193988 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.297415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.297467 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.297482 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.297500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.297515 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.400756 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.401206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.401295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.401424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.401510 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.504562 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.504603 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.504614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.504631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.504642 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.607154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.607217 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.607231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.607251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.607268 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.710125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.710172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.710186 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.710206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.710220 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.760423 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.761384 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:44:10 crc kubenswrapper[4747]: E1202 16:44:10.761389 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.813362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.813422 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.813433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.813452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.813463 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.918619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.918955 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.918971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.918992 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:10 crc kubenswrapper[4747]: I1202 16:44:10.919011 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:10Z","lastTransitionTime":"2025-12-02T16:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.021185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.021235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.021247 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.021264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.021277 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.123811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.123848 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.123857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.123872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.123882 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.230650 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.230705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.230733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.231154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.231173 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.334504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.334553 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.334564 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.334583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.334595 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.437555 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.437602 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.437613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.437629 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.437641 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.540442 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.540493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.540507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.540524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.540540 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.643655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.643734 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.643757 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.643788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.643813 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.735951 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.736148 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736199 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:45:15.736164973 +0000 UTC m=+146.263053732 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.736263 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.736323 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.736362 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736380 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736420 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736427 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736446 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736551 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:45:15.736513792 +0000 UTC m=+146.263402591 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736564 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736586 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 16:45:15.736569274 +0000 UTC m=+146.263458113 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736596 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736781 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736795 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736759 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 16:45:15.736734948 +0000 UTC m=+146.263623687 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.736864 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 16:45:15.736852742 +0000 UTC m=+146.263741491 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.746641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.746702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.746713 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.746730 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.746741 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.760213 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.760256 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.760214 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.760373 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.760778 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 02 16:44:11 crc kubenswrapper[4747]: E1202 16:44:11.760935 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.850093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.850161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.850173 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.850195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.850210 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.953479 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.953523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.953532 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.953549 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:11 crc kubenswrapper[4747]: I1202 16:44:11.953561 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:11Z","lastTransitionTime":"2025-12-02T16:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.056944 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.057014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.057037 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.057058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.057072 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:12Z","lastTransitionTime":"2025-12-02T16:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.088646 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.088693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.088708 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.088729 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.088744 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T16:44:12Z","lastTransitionTime":"2025-12-02T16:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.543012 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d"]
Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.543700 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d"
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: W1202 16:44:12.546299 4747 reflector.go:561] object-"openshift-cluster-version"/"default-dockercfg-gxtc4": failed to list *v1.Secret: secrets "default-dockercfg-gxtc4" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-version": no relationship found between node 'crc' and this object Dec 02 16:44:12 crc kubenswrapper[4747]: E1202 16:44:12.546362 4747 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-version\"/\"default-dockercfg-gxtc4\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-gxtc4\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-version\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.549710 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.554052 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.555148 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.589695 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=65.589666918 podStartE2EDuration="1m5.589666918s" podCreationTimestamp="2025-12-02 16:43:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.572667684 +0000 UTC m=+83.099556433" watchObservedRunningTime="2025-12-02 16:44:12.589666918 +0000 UTC m=+83.116555667" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.619337 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=61.619318047 podStartE2EDuration="1m1.619318047s" podCreationTimestamp="2025-12-02 16:43:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.61795285 +0000 UTC m=+83.144841609" watchObservedRunningTime="2025-12-02 16:44:12.619318047 +0000 UTC m=+83.146206796" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.620179 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=37.62016909 podStartE2EDuration="37.62016909s" podCreationTimestamp="2025-12-02 16:43:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.589569825 +0000 UTC m=+83.116458574" watchObservedRunningTime="2025-12-02 16:44:12.62016909 +0000 UTC m=+83.147057839" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.646285 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-c45mn" podStartSLOduration=60.646257682 podStartE2EDuration="1m0.646257682s" 
podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.646084817 +0000 UTC m=+83.172973576" watchObservedRunningTime="2025-12-02 16:44:12.646257682 +0000 UTC m=+83.173146431" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.647512 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/17c3198b-7d7d-4b01-b48d-b0236ab56734-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.647563 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/17c3198b-7d7d-4b01-b48d-b0236ab56734-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.647608 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/17c3198b-7d7d-4b01-b48d-b0236ab56734-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.647632 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17c3198b-7d7d-4b01-b48d-b0236ab56734-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.647708 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17c3198b-7d7d-4b01-b48d-b0236ab56734-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.665893 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=65.665874137 podStartE2EDuration="1m5.665874137s" podCreationTimestamp="2025-12-02 16:43:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.665707313 +0000 UTC m=+83.192596072" watchObservedRunningTime="2025-12-02 16:44:12.665874137 +0000 UTC m=+83.192762886" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.679569 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=7.67954773 podStartE2EDuration="7.67954773s" podCreationTimestamp="2025-12-02 16:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-02 16:44:12.679139969 +0000 UTC m=+83.206028718" watchObservedRunningTime="2025-12-02 16:44:12.67954773 +0000 UTC m=+83.206436479" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748366 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17c3198b-7d7d-4b01-b48d-b0236ab56734-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748416 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/17c3198b-7d7d-4b01-b48d-b0236ab56734-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748458 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/17c3198b-7d7d-4b01-b48d-b0236ab56734-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748503 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/17c3198b-7d7d-4b01-b48d-b0236ab56734-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748506 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/17c3198b-7d7d-4b01-b48d-b0236ab56734-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748524 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17c3198b-7d7d-4b01-b48d-b0236ab56734-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748576 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/17c3198b-7d7d-4b01-b48d-b0236ab56734-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.748555 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-tsbf6" podStartSLOduration=60.748541293 podStartE2EDuration="1m0.748541293s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.735386194 +0000 UTC m=+83.262274943" watchObservedRunningTime="2025-12-02 16:44:12.748541293 +0000 UTC m=+83.275430042" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.749529 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/17c3198b-7d7d-4b01-b48d-b0236ab56734-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.755606 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17c3198b-7d7d-4b01-b48d-b0236ab56734-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.760367 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:12 crc kubenswrapper[4747]: E1202 16:44:12.760534 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.776424 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/17c3198b-7d7d-4b01-b48d-b0236ab56734-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rkx5d\" (UID: \"17c3198b-7d7d-4b01-b48d-b0236ab56734\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.820390 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-scr52" podStartSLOduration=60.820369782 podStartE2EDuration="1m0.820369782s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.794339312 +0000 UTC m=+83.321228061" watchObservedRunningTime="2025-12-02 16:44:12.820369782 +0000 UTC m=+83.347258531" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.820777 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-q2z9c" podStartSLOduration=60.820772843 podStartE2EDuration="1m0.820772843s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.819351574 +0000 UTC m=+83.346240323" watchObservedRunningTime="2025-12-02 16:44:12.820772843 +0000 UTC m=+83.347661592" Dec 02 16:44:12 crc kubenswrapper[4747]: I1202 16:44:12.897038 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podStartSLOduration=60.897005133 podStartE2EDuration="1m0.897005133s" 
podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.896476479 +0000 UTC m=+83.423365248" watchObservedRunningTime="2025-12-02 16:44:12.897005133 +0000 UTC m=+83.423893882" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.241698 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/2.log" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.244813 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerStarted","Data":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.245476 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.275799 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-98rgz" podStartSLOduration=61.275780217 podStartE2EDuration="1m1.275780217s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:12.911356884 +0000 UTC m=+83.438245633" watchObservedRunningTime="2025-12-02 16:44:13.275780217 +0000 UTC m=+83.802668966" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.465449 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podStartSLOduration=61.465419411 podStartE2EDuration="1m1.465419411s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:13.276962039 +0000 UTC m=+83.803850798" watchObservedRunningTime="2025-12-02 16:44:13.465419411 +0000 UTC m=+83.992308180" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.465843 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8brc6"] Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.466031 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:13 crc kubenswrapper[4747]: E1202 16:44:13.466231 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.759889 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.760050 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.760087 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:13 crc kubenswrapper[4747]: E1202 16:44:13.760587 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:44:13 crc kubenswrapper[4747]: E1202 16:44:13.760964 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:44:13 crc kubenswrapper[4747]: E1202 16:44:13.761083 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.869620 4747 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" secret="" err="failed to sync secret cache: timed out waiting for the condition" Dec 02 16:44:13 crc kubenswrapper[4747]: I1202 16:44:13.869702 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" Dec 02 16:44:14 crc kubenswrapper[4747]: I1202 16:44:14.101788 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 16:44:14 crc kubenswrapper[4747]: I1202 16:44:14.250838 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" event={"ID":"17c3198b-7d7d-4b01-b48d-b0236ab56734","Type":"ContainerStarted","Data":"80b265ae3c293c8a3b1795034135964d14728dfe1e814a464ad73d9b9e9eafeb"} Dec 02 16:44:14 crc kubenswrapper[4747]: I1202 16:44:14.760316 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:14 crc kubenswrapper[4747]: E1202 16:44:14.760506 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-8brc6" podUID="fe274425-e804-4934-aa14-81ef24981fe9" Dec 02 16:44:15 crc kubenswrapper[4747]: I1202 16:44:15.256705 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" event={"ID":"17c3198b-7d7d-4b01-b48d-b0236ab56734","Type":"ContainerStarted","Data":"e49b9b9d2ddb8f79088502eb2357ba0e660feb86b3068dec2c669529b01d7da4"} Dec 02 16:44:15 crc kubenswrapper[4747]: I1202 16:44:15.270747 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rkx5d" podStartSLOduration=63.270717583 podStartE2EDuration="1m3.270717583s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:15.270659121 +0000 UTC m=+85.797547870" watchObservedRunningTime="2025-12-02 16:44:15.270717583 +0000 UTC m=+85.797606332" Dec 02 16:44:15 crc kubenswrapper[4747]: I1202 16:44:15.760018 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:15 crc kubenswrapper[4747]: I1202 16:44:15.760115 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:15 crc kubenswrapper[4747]: E1202 16:44:15.760573 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 16:44:15 crc kubenswrapper[4747]: I1202 16:44:15.760144 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:15 crc kubenswrapper[4747]: E1202 16:44:15.760650 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 16:44:15 crc kubenswrapper[4747]: E1202 16:44:15.760792 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.137598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.137813 4747 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.192585 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dn6td"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.193599 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.193944 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ddmtz"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.194307 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.199460 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.200169 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.201059 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.201512 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-n2rcw"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.201797 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.202298 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.203831 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.204307 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.205185 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gj7zq"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.205847 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.206730 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.206845 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.206927 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.207258 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.207312 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.207789 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.208353 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.208493 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.208670 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.213549 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.213692 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.213889 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.216924 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.218903 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.219177 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.219269 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.219527 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.220393 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 
16:44:16.220705 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.221030 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.221327 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.222170 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.222856 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.222936 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.223051 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.223384 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.223418 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.223442 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.226277 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.238279 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.238872 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.239061 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.239235 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.239526 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.239608 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.240300 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.240453 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-c2h5g"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.240578 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.241497 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.244223 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.248999 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.249821 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-8w6hw"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.250408 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.251082 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.253334 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.254524 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.257406 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.261998 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cd7fp"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.262748 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.263217 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.263880 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.271386 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.271429 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.271460 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.271375 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.271762 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.271838 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.272040 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.272139 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.272364 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.273530 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.274385 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.274662 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.275385 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.277988 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.278456 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9jst4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.278495 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.278851 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.282842 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-f27zn"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.283343 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.283658 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.300900 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htvk2\" (UniqueName: \"kubernetes.io/projected/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-kube-api-access-htvk2\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.300986 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-serving-cert\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.301020 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-audit-dir\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.301043 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9330ef08-f76f-4166-a6c0-a3275375c9f1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.301631 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-qmfcd"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302511 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-encryption-config\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302611 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330ef08-f76f-4166-a6c0-a3275375c9f1-config\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302659 4747 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302667 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302739 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-serving-cert\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302775 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-client-ca\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302810 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-etcd-client\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d83461-bc1a-4cff-b763-457f2ecfa536-config\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302926 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-trusted-ca\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.302983 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjgbq\" (UniqueName: \"kubernetes.io/projected/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-kube-api-access-hjgbq\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303023 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-config\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 
crc kubenswrapper[4747]: I1202 16:44:16.303056 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303125 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9330ef08-f76f-4166-a6c0-a3275375c9f1-images\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303161 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz9rn\" (UniqueName: \"kubernetes.io/projected/9330ef08-f76f-4166-a6c0-a3275375c9f1-kube-api-access-lz9rn\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303193 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdlpd\" (UniqueName: \"kubernetes.io/projected/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-kube-api-access-vdlpd\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303218 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-config\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303247 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303276 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303305 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36d83461-bc1a-4cff-b763-457f2ecfa536-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303338 
4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzg2m\" (UniqueName: \"kubernetes.io/projected/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-kube-api-access-lzg2m\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303364 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-config\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303397 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-service-ca-bundle\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303455 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-serving-cert\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303483 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzxjf\" (UniqueName: \"kubernetes.io/projected/36d83461-bc1a-4cff-b763-457f2ecfa536-kube-api-access-lzxjf\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303516 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-audit-policies\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.303543 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-serving-cert\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.304042 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-f27zn" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.304626 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.305222 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.305676 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.308347 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.308406 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.309270 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.309704 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.309937 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.310132 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.310285 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.310570 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.310652 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.310872 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.311182 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.313983 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.320107 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.330833 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.334988 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.341434 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.347749 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.347996 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.348163 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.348384 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.348638 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.348857 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.348887 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.349077 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.349394 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.349438 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.349404 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.349649 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.349834 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.350107 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.350215 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.350112 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.350343 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.350358 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.350691 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.350974 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.351122 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.352031 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.353161 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.353418 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.354047 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.354545 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.354817 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.352673 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.355746 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 
16:44:16.355928 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.356286 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.356307 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.356422 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.356451 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.356568 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.356990 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.357130 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.357137 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.357244 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.357335 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.359554 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lstcr"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.360116 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-dnt2d"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.361286 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-224n4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.362078 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.364099 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.365045 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.365238 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.370991 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.371257 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.371450 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.372221 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.372814 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.376554 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.377084 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.377463 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.377976 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.378353 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.378509 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.378723 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.380992 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-89l9h"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.381833 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-npzxm"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.382288 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.382578 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.382852 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gn7ft"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.383209 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.383894 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.391830 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.400973 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.417673 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.418372 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.418454 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.418529 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ddmtz"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.418598 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.418769 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.419547 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.433378 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.437869 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dn6td"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.438218 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-default-certificate\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.438390 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-etcd-client\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.438876 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-machine-approver-tls\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439008 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d83461-bc1a-4cff-b763-457f2ecfa536-config\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439080 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-n2rcw"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439180 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e06e723-6eff-4c1d-ac75-e063f99e039f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439295 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-trusted-ca\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439390 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjgbq\" (UniqueName: \"kubernetes.io/projected/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-kube-api-access-hjgbq\") 
pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439473 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-config\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439547 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.439962 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjcpw\" (UniqueName: \"kubernetes.io/projected/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-kube-api-access-wjcpw\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.440086 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11745207-557a-47de-b596-fd2e1c6d4ff7-trusted-ca\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.440171 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-metrics-certs\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.440968 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9330ef08-f76f-4166-a6c0-a3275375c9f1-images\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.445344 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.445766 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz9rn\" (UniqueName: \"kubernetes.io/projected/9330ef08-f76f-4166-a6c0-a3275375c9f1-kube-api-access-lz9rn\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.460774 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdlpd\" (UniqueName: 
\"kubernetes.io/projected/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-kube-api-access-vdlpd\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.460878 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.460939 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-config\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.460969 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf55m\" (UniqueName: \"kubernetes.io/projected/11745207-557a-47de-b596-fd2e1c6d4ff7-kube-api-access-lf55m\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.461065 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.461093 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.461117 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-auth-proxy-config\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.461514 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d83461-bc1a-4cff-b763-457f2ecfa536-config\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.462600 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-config\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") 
" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.463421 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.463573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzg2m\" (UniqueName: \"kubernetes.io/projected/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-kube-api-access-lzg2m\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.463722 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36d83461-bc1a-4cff-b763-457f2ecfa536-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.463891 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11745207-557a-47de-b596-fd2e1c6d4ff7-metrics-tls\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.464014 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-serving-cert\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.464113 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-c2h5g"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.464127 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-config\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.465167 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.465466 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.465732 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.466119 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.466226 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.466286 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.466321 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.467668 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.468172 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-etcd-client\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.468292 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-service-ca-bundle\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.468936 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-service-ca-bundle\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.469716 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/9330ef08-f76f-4166-a6c0-a3275375c9f1-images\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.469988 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-dnt2d"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.472523 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b79s7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.472539 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-config\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.471466 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-trusted-ca\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.474224 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04e7d78c-36d3-4997-8ed4-62c494eda7d3-service-ca-bundle\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475109 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmt9j\" (UniqueName: \"kubernetes.io/projected/7e06e723-6eff-4c1d-ac75-e063f99e039f-kube-api-access-tmt9j\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475153 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-serving-cert\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475183 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzxjf\" (UniqueName: \"kubernetes.io/projected/36d83461-bc1a-4cff-b763-457f2ecfa536-kube-api-access-lzxjf\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475210 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e06e723-6eff-4c1d-ac75-e063f99e039f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-audit-policies\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475273 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475301 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6c2t\" (UniqueName: \"kubernetes.io/projected/04e7d78c-36d3-4997-8ed4-62c494eda7d3-kube-api-access-w6c2t\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475331 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-serving-cert\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475363 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htvk2\" (UniqueName: \"kubernetes.io/projected/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-kube-api-access-htvk2\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475395 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-audit-dir\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475434 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9330ef08-f76f-4166-a6c0-a3275375c9f1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475491 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-config\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475527 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-encryption-config\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475565 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330ef08-f76f-4166-a6c0-a3275375c9f1-config\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475593 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475617 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-config\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475658 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-serving-cert\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475687 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/11745207-557a-47de-b596-fd2e1c6d4ff7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475714 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-client-ca\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.475736 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-stats-auth\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.476281 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.476472 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-audit-policies\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.476771 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.476773 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-serving-cert\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.477253 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.477338 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8w6hw"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.477471 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.477571 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330ef08-f76f-4166-a6c0-a3275375c9f1-config\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.480286 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.482186 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.482688 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-client-ca\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.482792 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-audit-dir\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.480744 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.483701 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-config\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.484878 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-serving-cert\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.484949 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/36d83461-bc1a-4cff-b763-457f2ecfa536-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.485891 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-serving-cert\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.486310 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/9330ef08-f76f-4166-a6c0-a3275375c9f1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.487474 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-serving-cert\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.493357 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.494851 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.496632 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.499756 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.501084 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.501297 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-encryption-config\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.505182 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.517161 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.518736 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-f27zn"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.522576 4747 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.523059 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.524436 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.526029 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-224n4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.527492 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cd7fp"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.529076 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.530477 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.532733 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.533850 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9jst4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.535126 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.535885 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.536524 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.538124 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lstcr"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.539446 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b79s7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.540918 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gj7zq"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.544977 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.547971 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-c4zkq"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.554697 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gn7ft"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.555741 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.556798 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.558470 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-qtjf6"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.561476 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.561645 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.563101 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-89l9h"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.565867 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.567103 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qtjf6"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.568356 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.569534 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-c4zkq"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.570666 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9"] Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576525 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmt9j\" (UniqueName: \"kubernetes.io/projected/7e06e723-6eff-4c1d-ac75-e063f99e039f-kube-api-access-tmt9j\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576565 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e06e723-6eff-4c1d-ac75-e063f99e039f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576618 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576635 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6c2t\" (UniqueName: \"kubernetes.io/projected/04e7d78c-36d3-4997-8ed4-62c494eda7d3-kube-api-access-w6c2t\") pod 
\"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576691 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-config\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576709 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-config\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576734 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/11745207-557a-47de-b596-fd2e1c6d4ff7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576752 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-stats-auth\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576767 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-default-certificate\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576784 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-machine-approver-tls\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576809 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e06e723-6eff-4c1d-ac75-e063f99e039f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576839 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjcpw\" (UniqueName: \"kubernetes.io/projected/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-kube-api-access-wjcpw\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc 
kubenswrapper[4747]: I1202 16:44:16.576863 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11745207-557a-47de-b596-fd2e1c6d4ff7-trusted-ca\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576881 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-metrics-certs\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576927 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576947 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf55m\" (UniqueName: \"kubernetes.io/projected/11745207-557a-47de-b596-fd2e1c6d4ff7-kube-api-access-lf55m\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576962 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-auth-proxy-config\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.576985 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11745207-557a-47de-b596-fd2e1c6d4ff7-metrics-tls\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.577015 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04e7d78c-36d3-4997-8ed4-62c494eda7d3-service-ca-bundle\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.577767 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04e7d78c-36d3-4997-8ed4-62c494eda7d3-service-ca-bundle\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.577934 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 16:44:16 crc 
kubenswrapper[4747]: I1202 16:44:16.578727 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e06e723-6eff-4c1d-ac75-e063f99e039f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.580292 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-config\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.580377 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-config\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.580884 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-auth-proxy-config\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.582593 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11745207-557a-47de-b596-fd2e1c6d4ff7-trusted-ca\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.583418 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-machine-approver-tls\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.583433 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.583885 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-default-certificate\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.585262 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-stats-auth\") pod 
\"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.586061 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/11745207-557a-47de-b596-fd2e1c6d4ff7-metrics-tls\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.591890 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/04e7d78c-36d3-4997-8ed4-62c494eda7d3-metrics-certs\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.596495 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.616455 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.623037 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e06e723-6eff-4c1d-ac75-e063f99e039f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.636081 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.655962 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.675937 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.696020 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.716092 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.736186 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.755864 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.760138 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.795682 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.816833 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.835882 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.857235 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.876229 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.897078 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.916768 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.936854 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.956480 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 16:44:16 crc kubenswrapper[4747]: I1202 16:44:16.976791 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.016450 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.036012 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.056161 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.076670 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.096771 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.117221 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.137399 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.157145 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.177713 
4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.196995 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.216430 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.236441 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.256755 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.275575 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.297089 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.317486 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.337831 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.357507 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.374150 4747 request.go:700] Waited for 1.001882304s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-apiserver/configmaps?fieldSelector=metadata.name%3Dtrusted-ca-bundle&limit=500&resourceVersion=0 Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.383128 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.397091 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.416975 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.438065 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.456095 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.476671 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.496934 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.517228 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 
16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.535927 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.557834 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.577353 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.597190 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.617118 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.637178 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.656386 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.677091 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.697058 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.718303 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.738072 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.756803 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.760428 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.760544 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.760448 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.776606 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.806767 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.818036 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.837330 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.856892 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.877158 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.900704 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.916683 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.936124 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.956239 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 16:44:17 crc kubenswrapper[4747]: I1202 16:44:17.976201 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.011357 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdlpd\" (UniqueName: \"kubernetes.io/projected/0ee24f28-7d17-45f0-9c74-00b2f8b38ed4-kube-api-access-vdlpd\") pod \"console-operator-58897d9998-n2rcw\" (UID: \"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4\") " pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.030856 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzg2m\" (UniqueName: \"kubernetes.io/projected/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-kube-api-access-lzg2m\") pod \"controller-manager-879f6c89f-ddmtz\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.036651 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.043999 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.056624 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.076663 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.078275 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.117038 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz9rn\" (UniqueName: \"kubernetes.io/projected/9330ef08-f76f-4166-a6c0-a3275375c9f1-kube-api-access-lz9rn\") pod \"machine-api-operator-5694c8668f-dn6td\" (UID: \"9330ef08-f76f-4166-a6c0-a3275375c9f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.118504 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.136172 4747 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.157650 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.199064 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjgbq\" (UniqueName: \"kubernetes.io/projected/6068bf0f-7a74-42e0-af1f-8d7d79c174ca-kube-api-access-hjgbq\") pod \"apiserver-7bbb656c7d-4mzwr\" (UID: \"6068bf0f-7a74-42e0-af1f-8d7d79c174ca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.216633 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzxjf\" (UniqueName: \"kubernetes.io/projected/36d83461-bc1a-4cff-b763-457f2ecfa536-kube-api-access-lzxjf\") pod \"openshift-apiserver-operator-796bbdcf4f-vntfv\" (UID: \"36d83461-bc1a-4cff-b763-457f2ecfa536\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.236683 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.240055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htvk2\" (UniqueName: \"kubernetes.io/projected/4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0-kube-api-access-htvk2\") pod \"authentication-operator-69f744f599-gj7zq\" (UID: \"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.256026 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.265347 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ddmtz"] Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 
16:44:18.279396 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.281731 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" event={"ID":"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67","Type":"ContainerStarted","Data":"93581492673f38b15b2923d7c7c67cc42d5c62fd68e15e484435c2e98e18a0ed"} Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.297185 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.313206 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-n2rcw"] Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.317411 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.322161 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.336701 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 16:44:18 crc kubenswrapper[4747]: W1202 16:44:18.340124 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ee24f28_7d17_45f0_9c74_00b2f8b38ed4.slice/crio-bc7516440d4c23fe7c483b9b49951e322eb940b5caad977a278575530a48ac98 WatchSource:0}: Error finding container bc7516440d4c23fe7c483b9b49951e322eb940b5caad977a278575530a48ac98: Status 404 returned error can't find the container with id bc7516440d4c23fe7c483b9b49951e322eb940b5caad977a278575530a48ac98 Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.355925 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.358054 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.374218 4747 request.go:700] Waited for 1.79598634s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager-operator/serviceaccounts/openshift-controller-manager-operator/token Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.398580 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmt9j\" (UniqueName: \"kubernetes.io/projected/7e06e723-6eff-4c1d-ac75-e063f99e039f-kube-api-access-tmt9j\") pod \"openshift-controller-manager-operator-756b6f6bc6-7v6x8\" (UID: \"7e06e723-6eff-4c1d-ac75-e063f99e039f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.406221 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.418227 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6c2t\" (UniqueName: \"kubernetes.io/projected/04e7d78c-36d3-4997-8ed4-62c494eda7d3-kube-api-access-w6c2t\") pod \"router-default-5444994796-qmfcd\" (UID: \"04e7d78c-36d3-4997-8ed4-62c494eda7d3\") " pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.431407 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf55m\" (UniqueName: \"kubernetes.io/projected/11745207-557a-47de-b596-fd2e1c6d4ff7-kube-api-access-lf55m\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.445392 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.454316 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.454380 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64fb89ca-43dd-47d7-a8ab-8e5525df5c59-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vpcns\" (UID: \"64fb89ca-43dd-47d7-a8ab-8e5525df5c59\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.459187 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.475232 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjcpw\" (UniqueName: \"kubernetes.io/projected/7b62bee3-cf8a-46b1-a0d4-fc040830cd5c-kube-api-access-wjcpw\") pod \"machine-approver-56656f9798-s2lq2\" (UID: \"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.499165 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.499302 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/11745207-557a-47de-b596-fd2e1c6d4ff7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-zsp6q\" (UID: \"11745207-557a-47de-b596-fd2e1c6d4ff7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:18 crc kubenswrapper[4747]: W1202 16:44:18.508533 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04e7d78c_36d3_4997_8ed4_62c494eda7d3.slice/crio-162488350d74acd9d893b5bd762caa7d50b62df813b6139fed36fb9f213e730f WatchSource:0}: Error finding container 162488350d74acd9d893b5bd762caa7d50b62df813b6139fed36fb9f213e730f: Status 404 returned error can't find the container with id 162488350d74acd9d893b5bd762caa7d50b62df813b6139fed36fb9f213e730f Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.524551 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.525057 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dn6td"] Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.577721 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.596322 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.599349 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv"] Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.617066 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629549 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/db83e8c9-a2d3-4bf3-a66f-e6df7670a420-metrics-tls\") pod \"dns-operator-744455d44c-c2h5g\" (UID: \"db83e8c9-a2d3-4bf3-a66f-e6df7670a420\") " pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629599 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-certificates\") pod \"image-registry-697d97f7c8-9jst4\" (UID: 
\"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629620 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-trusted-ca\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-policies\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629677 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629697 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpc5m\" (UniqueName: \"kubernetes.io/projected/ee08e880-d792-46ca-927a-16e6c078726e-kube-api-access-gpc5m\") pod \"cluster-samples-operator-665b6dd947-d4mjf\" (UID: \"ee08e880-d792-46ca-927a-16e6c078726e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629715 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-config\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629739 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b21e5fdd-5d2a-462d-975a-311b237765aa-serving-cert\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629775 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh7f9\" (UniqueName: \"kubernetes.io/projected/2f1dd59a-4931-4cc0-8105-f036e8e2f72f-kube-api-access-nh7f9\") pod \"control-plane-machine-set-operator-78cbb6b69f-2lxh4\" (UID: \"2f1dd59a-4931-4cc0-8105-f036e8e2f72f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629792 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6031bada-c419-487a-9e05-b52277b916b5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629807 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15099176-32c6-403e-94bc-9a5e1fb8ae9d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629822 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lmt4\" (UniqueName: \"kubernetes.io/projected/fd81d86e-f692-4a5d-885f-9c37fa608b4f-kube-api-access-2lmt4\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629840 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629855 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629872 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-bound-sa-token\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.629894 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh9hw\" (UniqueName: \"kubernetes.io/projected/b21e5fdd-5d2a-462d-975a-311b237765aa-kube-api-access-nh9hw\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630056 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15099176-32c6-403e-94bc-9a5e1fb8ae9d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630081 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630162 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6031bada-c419-487a-9e05-b52277b916b5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630216 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630263 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630284 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-trusted-ca-bundle\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630305 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-oauth-config\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630402 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a928a06-0efd-47fe-b677-26ee96cbd922-proxy-tls\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" 
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630438 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/b21e5fdd-5d2a-462d-975a-311b237765aa-available-featuregates\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630462 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630483 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630501 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wk54\" (UniqueName: \"kubernetes.io/projected/d7899a59-a928-4bd1-895b-b10de5439051-kube-api-access-2wk54\") pod \"migrator-59844c95c7-vq4z9\" (UID: \"d7899a59-a928-4bd1-895b-b10de5439051\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630523 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630544 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgnc5\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-kube-api-access-jgnc5\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630563 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-config\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630585 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630625 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kctmf\" (UniqueName: \"kubernetes.io/projected/581ef685-55a6-489c-ab7d-ac0db896aa30-kube-api-access-kctmf\") pod \"downloads-7954f5f757-f27zn\" (UID: \"581ef685-55a6-489c-ab7d-ac0db896aa30\") " pod="openshift-console/downloads-7954f5f757-f27zn"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630643 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-service-ca\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630663 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2f1dd59a-4931-4cc0-8105-f036e8e2f72f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2lxh4\" (UID: \"2f1dd59a-4931-4cc0-8105-f036e8e2f72f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630683 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdqcs\" (UniqueName: \"kubernetes.io/projected/0a928a06-0efd-47fe-b677-26ee96cbd922-kube-api-access-zdqcs\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630707 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630747 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630767 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/15099176-32c6-403e-94bc-9a5e1fb8ae9d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp"
Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630787 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-serving-cert\") pod
\"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630818 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-dir\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630838 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtzbf\" (UniqueName: \"kubernetes.io/projected/6031bada-c419-487a-9e05-b52277b916b5-kube-api-access-gtzbf\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630953 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7nz7\" (UniqueName: \"kubernetes.io/projected/0e56253a-5b24-4cac-8f3f-9b357bc12f82-kube-api-access-t7nz7\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.630980 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-oauth-serving-cert\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631001 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ee08e880-d792-46ca-927a-16e6c078726e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-d4mjf\" (UID: \"ee08e880-d792-46ca-927a-16e6c078726e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631113 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631144 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631184 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0a928a06-0efd-47fe-b677-26ee96cbd922-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631205 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631263 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6v58g\" (UniqueName: \"kubernetes.io/projected/db83e8c9-a2d3-4bf3-a66f-e6df7670a420-kube-api-access-6v58g\") pod \"dns-operator-744455d44c-c2h5g\" (UID: \"db83e8c9-a2d3-4bf3-a66f-e6df7670a420\") " pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631287 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-tls\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.631318 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4lbs\" (UniqueName: \"kubernetes.io/projected/15099176-32c6-403e-94bc-9a5e1fb8ae9d-kube-api-access-w4lbs\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: E1202 16:44:18.633599 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.13357884 +0000 UTC m=+89.660467679 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.637018 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.640698 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr"] Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.668017 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" Dec 02 16:44:18 crc kubenswrapper[4747]: W1202 16:44:18.674005 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6068bf0f_7a74_42e0_af1f_8d7d79c174ca.slice/crio-2fd4eb2016fd8df5f916d592d1a6befeb49bf4c784135fcef7bdef6d13f4b542 WatchSource:0}: Error finding container 2fd4eb2016fd8df5f916d592d1a6befeb49bf4c784135fcef7bdef6d13f4b542: Status 404 returned error can't find the container with id 2fd4eb2016fd8df5f916d592d1a6befeb49bf4c784135fcef7bdef6d13f4b542 Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.684351 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.725876 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-gj7zq"] Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.728074 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.732730 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:18 crc kubenswrapper[4747]: E1202 16:44:18.732938 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.232893849 +0000 UTC m=+89.759782598 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733007 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42ae6217-8ecb-40e5-9713-04ef37a93c8d-apiservice-cert\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733039 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdqcs\" (UniqueName: \"kubernetes.io/projected/0a928a06-0efd-47fe-b677-26ee96cbd922-kube-api-access-zdqcs\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733057 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-trusted-ca-bundle\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733075 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae95be13-3031-4f2a-8e4f-a6d5742c246c-proxy-tls\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733095 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-node-bootstrap-token\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733111 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/42ae6217-8ecb-40e5-9713-04ef37a93c8d-tmpfs\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733155 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: 
I1202 16:44:18.733180 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/15099176-32c6-403e-94bc-9a5e1fb8ae9d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733206 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwqdt\" (UniqueName: \"kubernetes.io/projected/72d97894-0a82-427f-8376-bea96de36324-kube-api-access-hwqdt\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733231 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/acaec0be-3f32-4a76-9f3c-d291c3ceabea-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733252 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733276 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-config\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733297 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-client-ca\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733317 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-service-ca\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733346 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea23bfbb-080b-479c-8971-594045cdc2a4-serving-cert\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 
16:44:18.733369 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-socket-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733445 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7nz7\" (UniqueName: \"kubernetes.io/projected/0e56253a-5b24-4cac-8f3f-9b357bc12f82-kube-api-access-t7nz7\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733507 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1f5586e5-c2c7-47f6-9022-004baaa3a53c-cert\") pod \"ingress-canary-qtjf6\" (UID: \"1f5586e5-c2c7-47f6-9022-004baaa3a53c\") " pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733547 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ee08e880-d792-46ca-927a-16e6c078726e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-d4mjf\" (UID: \"ee08e880-d792-46ca-927a-16e6c078726e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733698 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a928a06-0efd-47fe-b677-26ee96cbd922-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.733854 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6v58g\" (UniqueName: \"kubernetes.io/projected/db83e8c9-a2d3-4bf3-a66f-e6df7670a420-kube-api-access-6v58g\") pod \"dns-operator-744455d44c-c2h5g\" (UID: \"db83e8c9-a2d3-4bf3-a66f-e6df7670a420\") " pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734060 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-tls\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734200 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-image-import-ca\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734265 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e192bf89-81aa-4141-be73-14cef0f2ba6e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-224n4\" (UID: \"e192bf89-81aa-4141-be73-14cef0f2ba6e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734295 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6blhw\" (UniqueName: \"kubernetes.io/projected/900274ee-b879-496e-bc26-a3cd59bd6536-kube-api-access-6blhw\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734317 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da1ec67a-b576-4f86-8b98-272f3e4ed165-config-volume\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734493 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4lbs\" (UniqueName: \"kubernetes.io/projected/15099176-32c6-403e-94bc-9a5e1fb8ae9d-kube-api-access-w4lbs\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734560 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxqgc\" (UniqueName: \"kubernetes.io/projected/2530933f-b67d-4f7e-93c2-3edf83af285b-kube-api-access-nxqgc\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734642 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-encryption-config\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734693 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a928a06-0efd-47fe-b677-26ee96cbd922-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:18 crc kubenswrapper[4747]: E1202 16:44:18.734703 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 16:44:19.234679747 +0000 UTC m=+89.761568696 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734847 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-trusted-ca\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734932 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.734983 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42ae6217-8ecb-40e5-9713-04ef37a93c8d-webhook-cert\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735018 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b21e5fdd-5d2a-462d-975a-311b237765aa-serving-cert\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-config\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735086 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkbfv\" (UniqueName: \"kubernetes.io/projected/da1ec67a-b576-4f86-8b98-272f3e4ed165-kube-api-access-tkbfv\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735153 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-profile-collector-cert\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735194 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15099176-32c6-403e-94bc-9a5e1fb8ae9d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735244 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735268 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6031bada-c419-487a-9e05-b52277b916b5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735315 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-client\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735344 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735391 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh9hw\" (UniqueName: \"kubernetes.io/projected/b21e5fdd-5d2a-462d-975a-311b237765aa-kube-api-access-nh9hw\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735414 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15099176-32c6-403e-94bc-9a5e1fb8ae9d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735434 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735480 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6031bada-c419-487a-9e05-b52277b916b5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735504 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcgg2\" (UniqueName: \"kubernetes.io/projected/42ae6217-8ecb-40e5-9713-04ef37a93c8d-kube-api-access-wcgg2\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735552 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-registration-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735577 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41942474-c0da-4b4d-8f78-27bca08e7b53-serving-cert\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735882 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-trusted-ca-bundle\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735932 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2530933f-b67d-4f7e-93c2-3edf83af285b-secret-volume\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.735982 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-oauth-config\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736023 
4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-etcd-client\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736105 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh8jh\" (UniqueName: \"kubernetes.io/projected/acaec0be-3f32-4a76-9f3c-d291c3ceabea-kube-api-access-sh8jh\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736213 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a928a06-0efd-47fe-b677-26ee96cbd922-proxy-tls\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736259 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f18ce1c-261b-4c18-b729-c3d24fceb581-serving-cert\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736287 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/b21e5fdd-5d2a-462d-975a-311b237765aa-available-featuregates\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736323 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-trusted-ca\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736365 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-etcd-serving-ca\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736415 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-config\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736438 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgnc5\" (UniqueName: 
\"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-kube-api-access-jgnc5\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736514 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kctmf\" (UniqueName: \"kubernetes.io/projected/581ef685-55a6-489c-ab7d-ac0db896aa30-kube-api-access-kctmf\") pod \"downloads-7954f5f757-f27zn\" (UID: \"581ef685-55a6-489c-ab7d-ac0db896aa30\") " pod="openshift-console/downloads-7954f5f757-f27zn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736536 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-service-ca\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736600 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2f1dd59a-4931-4cc0-8105-f036e8e2f72f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2lxh4\" (UID: \"2f1dd59a-4931-4cc0-8105-f036e8e2f72f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736627 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ae95be13-3031-4f2a-8e4f-a6d5742c246c-images\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736672 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4jzs\" (UniqueName: \"kubernetes.io/projected/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-kube-api-access-j4jzs\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736708 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-audit\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736748 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2530933f-b67d-4f7e-93c2-3edf83af285b-config-volume\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736787 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-config\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: 
\"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736787 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-srv-cert\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736863 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-serving-cert\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736943 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c50840fc-2cea-4e15-ab40-f49eb59662e4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736968 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-dir\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.736986 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtzbf\" (UniqueName: \"kubernetes.io/projected/6031bada-c419-487a-9e05-b52277b916b5-kube-api-access-gtzbf\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737030 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-certs\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737092 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/b21e5fdd-5d2a-462d-975a-311b237765aa-available-featuregates\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737147 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzt8l\" (UniqueName: \"kubernetes.io/projected/48b44473-6607-4bce-8136-3d355e5ff018-kube-api-access-lzt8l\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " 
pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737230 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg9cz\" (UniqueName: \"kubernetes.io/projected/e5cd6f4c-a2dc-476c-a8e6-2913db00b182-kube-api-access-kg9cz\") pod \"package-server-manager-789f6589d5-x5bs9\" (UID: \"e5cd6f4c-a2dc-476c-a8e6-2913db00b182\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737305 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2bz9\" (UniqueName: \"kubernetes.io/projected/2079a061-35c0-40b3-8591-53decd25d0bf-kube-api-access-m2bz9\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737325 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/acaec0be-3f32-4a76-9f3c-d291c3ceabea-srv-cert\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737356 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d29rx\" (UniqueName: \"kubernetes.io/projected/41942474-c0da-4b4d-8f78-27bca08e7b53-kube-api-access-d29rx\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737415 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-mountpoint-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/da1ec67a-b576-4f86-8b98-272f3e4ed165-metrics-tls\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737512 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-oauth-serving-cert\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737585 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737629 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-config\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737655 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737665 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/15099176-32c6-403e-94bc-9a5e1fb8ae9d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737677 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2079a061-35c0-40b3-8591-53decd25d0bf-audit-dir\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737722 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/900274ee-b879-496e-bc26-a3cd59bd6536-signing-key\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.737744 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-csi-data-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.738139 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ee08e880-d792-46ca-927a-16e6c078726e-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-d4mjf\" (UID: \"ee08e880-d792-46ca-927a-16e6c078726e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.738451 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.739046 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b21e5fdd-5d2a-462d-975a-311b237765aa-serving-cert\") pod 
\"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.739059 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-ca\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.739113 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-tls\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.739269 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.739265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740216 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-config\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740432 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-ca-trust-extracted\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-trusted-ca-bundle\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740568 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-dir\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740807 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/db83e8c9-a2d3-4bf3-a66f-e6df7670a420-metrics-tls\") pod \"dns-operator-744455d44c-c2h5g\" (UID: \"db83e8c9-a2d3-4bf3-a66f-e6df7670a420\") " pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740856 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-certificates\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740862 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/15099176-32c6-403e-94bc-9a5e1fb8ae9d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740886 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5cd6f4c-a2dc-476c-a8e6-2913db00b182-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x5bs9\" (UID: \"e5cd6f4c-a2dc-476c-a8e6-2913db00b182\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740939 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c50840fc-2cea-4e15-ab40-f49eb59662e4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740965 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjpx6\" (UniqueName: \"kubernetes.io/projected/1f5586e5-c2c7-47f6-9022-004baaa3a53c-kube-api-access-hjpx6\") pod \"ingress-canary-qtjf6\" (UID: \"1f5586e5-c2c7-47f6-9022-004baaa3a53c\") " pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.740992 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-serving-cert\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741041 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741064 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-hn8rq\" (UniqueName: \"kubernetes.io/projected/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-kube-api-access-hn8rq\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741300 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-policies\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741342 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpc5m\" (UniqueName: \"kubernetes.io/projected/ee08e880-d792-46ca-927a-16e6c078726e-kube-api-access-gpc5m\") pod \"cluster-samples-operator-665b6dd947-d4mjf\" (UID: \"ee08e880-d792-46ca-927a-16e6c078726e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741375 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/900274ee-b879-496e-bc26-a3cd59bd6536-signing-cabundle\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741506 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae95be13-3031-4f2a-8e4f-a6d5742c246c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741654 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh7f9\" (UniqueName: \"kubernetes.io/projected/2f1dd59a-4931-4cc0-8105-f036e8e2f72f-kube-api-access-nh7f9\") pod \"control-plane-machine-set-operator-78cbb6b69f-2lxh4\" (UID: \"2f1dd59a-4931-4cc0-8105-f036e8e2f72f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741829 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-plugins-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741866 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f18ce1c-261b-4c18-b729-c3d24fceb581-config\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741926 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lmt4\" (UniqueName: 
\"kubernetes.io/projected/fd81d86e-f692-4a5d-885f-9c37fa608b4f-kube-api-access-2lmt4\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.741963 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-oauth-serving-cert\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742059 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-bound-sa-token\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742111 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c50840fc-2cea-4e15-ab40-f49eb59662e4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742199 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d97894-0a82-427f-8376-bea96de36324-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742294 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742345 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742380 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2079a061-35c0-40b3-8591-53decd25d0bf-node-pullsecrets\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742381 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-service-ca\") pod \"console-f9d7485db-8w6hw\" (UID: 
\"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742444 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfrvc\" (UniqueName: \"kubernetes.io/projected/ae95be13-3031-4f2a-8e4f-a6d5742c246c-kube-api-access-lfrvc\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742508 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/72d97894-0a82-427f-8376-bea96de36324-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742542 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbvn8\" (UniqueName: \"kubernetes.io/projected/ea23bfbb-080b-479c-8971-594045cdc2a4-kube-api-access-cbvn8\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742623 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssn5m\" (UniqueName: \"kubernetes.io/projected/e192bf89-81aa-4141-be73-14cef0f2ba6e-kube-api-access-ssn5m\") pod \"multus-admission-controller-857f4d67dd-224n4\" (UID: \"e192bf89-81aa-4141-be73-14cef0f2ba6e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742654 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-config\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742683 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742757 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742810 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wk54\" (UniqueName: 
\"kubernetes.io/projected/d7899a59-a928-4bd1-895b-b10de5439051-kube-api-access-2wk54\") pod \"migrator-59844c95c7-vq4z9\" (UID: \"d7899a59-a928-4bd1-895b-b10de5439051\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742838 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr979\" (UniqueName: \"kubernetes.io/projected/9f18ce1c-261b-4c18-b729-c3d24fceb581-kube-api-access-fr979\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742921 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-serving-cert\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.742928 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.743027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.744117 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-certificates\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.744397 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2f1dd59a-4931-4cc0-8105-f036e8e2f72f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-2lxh4\" (UID: \"2f1dd59a-4931-4cc0-8105-f036e8e2f72f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.744402 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a928a06-0efd-47fe-b677-26ee96cbd922-proxy-tls\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.745001 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-installation-pull-secrets\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.745044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.745280 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-oauth-config\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.745415 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-policies\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.745461 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.745565 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.746113 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/db83e8c9-a2d3-4bf3-a66f-e6df7670a420-metrics-tls\") pod \"dns-operator-744455d44c-c2h5g\" (UID: \"db83e8c9-a2d3-4bf3-a66f-e6df7670a420\") " pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.746752 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.746825 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.748295 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.748338 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.767701 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8"] Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.772775 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7nz7\" (UniqueName: \"kubernetes.io/projected/0e56253a-5b24-4cac-8f3f-9b357bc12f82-kube-api-access-t7nz7\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.780132 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6031bada-c419-487a-9e05-b52277b916b5-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.781719 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cd7fp\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.787201 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6031bada-c419-487a-9e05-b52277b916b5-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.797245 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6v58g\" (UniqueName: \"kubernetes.io/projected/db83e8c9-a2d3-4bf3-a66f-e6df7670a420-kube-api-access-6v58g\") pod \"dns-operator-744455d44c-c2h5g\" (UID: \"db83e8c9-a2d3-4bf3-a66f-e6df7670a420\") " pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.823719 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdqcs\" (UniqueName: 
\"kubernetes.io/projected/0a928a06-0efd-47fe-b677-26ee96cbd922-kube-api-access-zdqcs\") pod \"machine-config-controller-84d6567774-rjl6z\" (UID: \"0a928a06-0efd-47fe-b677-26ee96cbd922\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.836895 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4lbs\" (UniqueName: \"kubernetes.io/projected/15099176-32c6-403e-94bc-9a5e1fb8ae9d-kube-api-access-w4lbs\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.840190 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.843862 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssn5m\" (UniqueName: \"kubernetes.io/projected/e192bf89-81aa-4141-be73-14cef0f2ba6e-kube-api-access-ssn5m\") pod \"multus-admission-controller-857f4d67dd-224n4\" (UID: \"e192bf89-81aa-4141-be73-14cef0f2ba6e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844196 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-config\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844233 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr979\" (UniqueName: \"kubernetes.io/projected/9f18ce1c-261b-4c18-b729-c3d24fceb581-kube-api-access-fr979\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844261 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42ae6217-8ecb-40e5-9713-04ef37a93c8d-apiservice-cert\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844287 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-trusted-ca-bundle\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844311 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae95be13-3031-4f2a-8e4f-a6d5742c246c-proxy-tls\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-node-bootstrap-token\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844358 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/42ae6217-8ecb-40e5-9713-04ef37a93c8d-tmpfs\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844394 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwqdt\" (UniqueName: \"kubernetes.io/projected/72d97894-0a82-427f-8376-bea96de36324-kube-api-access-hwqdt\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844420 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/acaec0be-3f32-4a76-9f3c-d291c3ceabea-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844447 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-config\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844469 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-client-ca\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844492 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-service-ca\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844512 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea23bfbb-080b-479c-8971-594045cdc2a4-serving-cert\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: 
\"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844642 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-socket-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844679 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1f5586e5-c2c7-47f6-9022-004baaa3a53c-cert\") pod \"ingress-canary-qtjf6\" (UID: \"1f5586e5-c2c7-47f6-9022-004baaa3a53c\") " pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844707 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-image-import-ca\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844732 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e192bf89-81aa-4141-be73-14cef0f2ba6e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-224n4\" (UID: \"e192bf89-81aa-4141-be73-14cef0f2ba6e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844760 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6blhw\" (UniqueName: \"kubernetes.io/projected/900274ee-b879-496e-bc26-a3cd59bd6536-kube-api-access-6blhw\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844784 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da1ec67a-b576-4f86-8b98-272f3e4ed165-config-volume\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844807 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxqgc\" (UniqueName: \"kubernetes.io/projected/2530933f-b67d-4f7e-93c2-3edf83af285b-kube-api-access-nxqgc\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844842 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-encryption-config\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844867 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/42ae6217-8ecb-40e5-9713-04ef37a93c8d-webhook-cert\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844892 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkbfv\" (UniqueName: \"kubernetes.io/projected/da1ec67a-b576-4f86-8b98-272f3e4ed165-kube-api-access-tkbfv\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844941 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-profile-collector-cert\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.844966 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-client\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.845006 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcgg2\" (UniqueName: \"kubernetes.io/projected/42ae6217-8ecb-40e5-9713-04ef37a93c8d-kube-api-access-wcgg2\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.845031 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-registration-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.846553 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41942474-c0da-4b4d-8f78-27bca08e7b53-serving-cert\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.846740 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2530933f-b67d-4f7e-93c2-3edf83af285b-secret-volume\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.846836 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-etcd-client\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.846915 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh8jh\" (UniqueName: \"kubernetes.io/projected/acaec0be-3f32-4a76-9f3c-d291c3ceabea-kube-api-access-sh8jh\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.846949 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f18ce1c-261b-4c18-b729-c3d24fceb581-serving-cert\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847014 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-etcd-serving-ca\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847090 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ae95be13-3031-4f2a-8e4f-a6d5742c246c-images\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847116 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4jzs\" (UniqueName: \"kubernetes.io/projected/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-kube-api-access-j4jzs\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847173 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-audit\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847198 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2530933f-b67d-4f7e-93c2-3edf83af285b-config-volume\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847256 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-srv-cert\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847290 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c50840fc-2cea-4e15-ab40-f49eb59662e4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: 
\"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847362 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-certs\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847424 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzt8l\" (UniqueName: \"kubernetes.io/projected/48b44473-6607-4bce-8136-3d355e5ff018-kube-api-access-lzt8l\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847457 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg9cz\" (UniqueName: \"kubernetes.io/projected/e5cd6f4c-a2dc-476c-a8e6-2913db00b182-kube-api-access-kg9cz\") pod \"package-server-manager-789f6589d5-x5bs9\" (UID: \"e5cd6f4c-a2dc-476c-a8e6-2913db00b182\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847526 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/42ae6217-8ecb-40e5-9713-04ef37a93c8d-tmpfs\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847598 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2bz9\" (UniqueName: \"kubernetes.io/projected/2079a061-35c0-40b3-8591-53decd25d0bf-kube-api-access-m2bz9\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847629 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/acaec0be-3f32-4a76-9f3c-d291c3ceabea-srv-cert\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847672 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d29rx\" (UniqueName: \"kubernetes.io/projected/41942474-c0da-4b4d-8f78-27bca08e7b53-kube-api-access-d29rx\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847700 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-mountpoint-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847723 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-tls\" (UniqueName: \"kubernetes.io/secret/da1ec67a-b576-4f86-8b98-272f3e4ed165-metrics-tls\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847756 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-config\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847784 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2079a061-35c0-40b3-8591-53decd25d0bf-audit-dir\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/900274ee-b879-496e-bc26-a3cd59bd6536-signing-key\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.848504 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-client-ca\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.849432 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-service-ca\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.849546 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-config\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: E1202 16:44:18.849607 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.349580802 +0000 UTC m=+89.876469661 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.849886 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-socket-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.850225 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-config\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.850567 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/da1ec67a-b576-4f86-8b98-272f3e4ed165-config-volume\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.852149 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-csi-data-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.852193 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2079a061-35c0-40b3-8591-53decd25d0bf-audit-dir\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.847834 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-csi-data-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853131 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-ca\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853197 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5cd6f4c-a2dc-476c-a8e6-2913db00b182-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x5bs9\" (UID: \"e5cd6f4c-a2dc-476c-a8e6-2913db00b182\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853242 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c50840fc-2cea-4e15-ab40-f49eb59662e4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853278 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjpx6\" (UniqueName: \"kubernetes.io/projected/1f5586e5-c2c7-47f6-9022-004baaa3a53c-kube-api-access-hjpx6\") pod \"ingress-canary-qtjf6\" (UID: \"1f5586e5-c2c7-47f6-9022-004baaa3a53c\") " pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853306 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-serving-cert\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853340 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn8rq\" (UniqueName: \"kubernetes.io/projected/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-kube-api-access-hn8rq\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853417 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/900274ee-b879-496e-bc26-a3cd59bd6536-signing-cabundle\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853469 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae95be13-3031-4f2a-8e4f-a6d5742c246c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853513 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-plugins-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853552 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f18ce1c-261b-4c18-b729-c3d24fceb581-config\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853599 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c50840fc-2cea-4e15-ab40-f49eb59662e4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853628 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d97894-0a82-427f-8376-bea96de36324-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853701 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2079a061-35c0-40b3-8591-53decd25d0bf-node-pullsecrets\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853749 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfrvc\" (UniqueName: \"kubernetes.io/projected/ae95be13-3031-4f2a-8e4f-a6d5742c246c-kube-api-access-lfrvc\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853782 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/72d97894-0a82-427f-8376-bea96de36324-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.853808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbvn8\" (UniqueName: \"kubernetes.io/projected/ea23bfbb-080b-479c-8971-594045cdc2a4-kube-api-access-cbvn8\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.854762 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-ca\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.855983 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-encryption-config\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.856458 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2530933f-b67d-4f7e-93c2-3edf83af285b-config-volume\") pod \"collect-profiles-29411550-2s6h7\" 
(UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.856813 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42ae6217-8ecb-40e5-9713-04ef37a93c8d-webhook-cert\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.856921 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e192bf89-81aa-4141-be73-14cef0f2ba6e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-224n4\" (UID: \"e192bf89-81aa-4141-be73-14cef0f2ba6e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.856978 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42ae6217-8ecb-40e5-9713-04ef37a93c8d-apiservice-cert\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.857069 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-plugins-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.857439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-registration-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.857446 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae95be13-3031-4f2a-8e4f-a6d5742c246c-proxy-tls\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.857823 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/900274ee-b879-496e-bc26-a3cd59bd6536-signing-key\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.858007 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-node-bootstrap-token\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.858589 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/ae95be13-3031-4f2a-8e4f-a6d5742c246c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.858817 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c50840fc-2cea-4e15-ab40-f49eb59662e4-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.861592 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/2079a061-35c0-40b3-8591-53decd25d0bf-node-pullsecrets\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.863222 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/48b44473-6607-4bce-8136-3d355e5ff018-mountpoint-dir\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.864341 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f18ce1c-261b-4c18-b729-c3d24fceb581-config\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.864504 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea23bfbb-080b-479c-8971-594045cdc2a4-serving-cert\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.866187 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/acaec0be-3f32-4a76-9f3c-d291c3ceabea-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.870537 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2530933f-b67d-4f7e-93c2-3edf83af285b-secret-volume\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.872050 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/da1ec67a-b576-4f86-8b98-272f3e4ed165-metrics-tls\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.872988 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/acaec0be-3f32-4a76-9f3c-d291c3ceabea-srv-cert\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.874712 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ae95be13-3031-4f2a-8e4f-a6d5742c246c-images\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.875991 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-profile-collector-cert\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.877930 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-config\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.878122 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f18ce1c-261b-4c18-b729-c3d24fceb581-serving-cert\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.879013 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-etcd-client\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.880875 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-certs\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.881463 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41942474-c0da-4b4d-8f78-27bca08e7b53-etcd-client\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.882691 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/72d97894-0a82-427f-8376-bea96de36324-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc 
kubenswrapper[4747]: I1202 16:44:18.882829 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c50840fc-2cea-4e15-ab40-f49eb59662e4-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.882843 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-image-import-ca\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.882898 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/900274ee-b879-496e-bc26-a3cd59bd6536-signing-cabundle\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.883122 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d97894-0a82-427f-8376-bea96de36324-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.883699 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41942474-c0da-4b4d-8f78-27bca08e7b53-serving-cert\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.883703 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh9hw\" (UniqueName: \"kubernetes.io/projected/b21e5fdd-5d2a-462d-975a-311b237765aa-kube-api-access-nh9hw\") pod \"openshift-config-operator-7777fb866f-fkbtn\" (UID: \"b21e5fdd-5d2a-462d-975a-311b237765aa\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.883755 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-etcd-serving-ca\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.883809 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2079a061-35c0-40b3-8591-53decd25d0bf-serving-cert\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.883939 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5cd6f4c-a2dc-476c-a8e6-2913db00b182-package-server-manager-serving-cert\") pod 
\"package-server-manager-789f6589d5-x5bs9\" (UID: \"e5cd6f4c-a2dc-476c-a8e6-2913db00b182\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.884219 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-trusted-ca-bundle\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.885931 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-srv-cert\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.887129 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/2079a061-35c0-40b3-8591-53decd25d0bf-audit\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.897587 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1f5586e5-c2c7-47f6-9022-004baaa3a53c-cert\") pod \"ingress-canary-qtjf6\" (UID: \"1f5586e5-c2c7-47f6-9022-004baaa3a53c\") " pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.904929 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgnc5\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-kube-api-access-jgnc5\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.927091 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/15099176-32c6-403e-94bc-9a5e1fb8ae9d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-bfnkp\" (UID: \"15099176-32c6-403e-94bc-9a5e1fb8ae9d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.945543 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kctmf\" (UniqueName: \"kubernetes.io/projected/581ef685-55a6-489c-ab7d-ac0db896aa30-kube-api-access-kctmf\") pod \"downloads-7954f5f757-f27zn\" (UID: \"581ef685-55a6-489c-ab7d-ac0db896aa30\") " pod="openshift-console/downloads-7954f5f757-f27zn" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.953291 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.956470 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:18 crc kubenswrapper[4747]: E1202 16:44:18.957047 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.457030474 +0000 UTC m=+89.983919223 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.958875 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtzbf\" (UniqueName: \"kubernetes.io/projected/6031bada-c419-487a-9e05-b52277b916b5-kube-api-access-gtzbf\") pod \"kube-storage-version-migrator-operator-b67b599dd-qwrvt\" (UID: \"6031bada-c419-487a-9e05-b52277b916b5\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:18 crc kubenswrapper[4747]: I1202 16:44:18.981962 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpc5m\" (UniqueName: \"kubernetes.io/projected/ee08e880-d792-46ca-927a-16e6c078726e-kube-api-access-gpc5m\") pod \"cluster-samples-operator-665b6dd947-d4mjf\" (UID: \"ee08e880-d792-46ca-927a-16e6c078726e\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.002858 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh7f9\" (UniqueName: \"kubernetes.io/projected/2f1dd59a-4931-4cc0-8105-f036e8e2f72f-kube-api-access-nh7f9\") pod \"control-plane-machine-set-operator-78cbb6b69f-2lxh4\" (UID: \"2f1dd59a-4931-4cc0-8105-f036e8e2f72f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.015059 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-f27zn" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.017374 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lmt4\" (UniqueName: \"kubernetes.io/projected/fd81d86e-f692-4a5d-885f-9c37fa608b4f-kube-api-access-2lmt4\") pod \"console-f9d7485db-8w6hw\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.052472 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6c87ccc-eaf9-46fb-a066-a9a145bde3c8-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-rxbcg\" (UID: \"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.053993 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-bound-sa-token\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.057835 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.058165 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.558127172 +0000 UTC m=+90.085015921 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.059381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.060378 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.560369733 +0000 UTC m=+90.087258482 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.064400 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.081965 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.094077 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wk54\" (UniqueName: \"kubernetes.io/projected/d7899a59-a928-4bd1-895b-b10de5439051-kube-api-access-2wk54\") pod \"migrator-59844c95c7-vq4z9\" (UID: \"d7899a59-a928-4bd1-895b-b10de5439051\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.104461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q"] Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.106978 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns"] Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.117866 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwqdt\" (UniqueName: \"kubernetes.io/projected/72d97894-0a82-427f-8376-bea96de36324-kube-api-access-hwqdt\") pod \"marketplace-operator-79b997595-89l9h\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") " pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.125640 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:19 crc kubenswrapper[4747]: W1202 16:44:19.127746 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11745207_557a_47de_b596_fd2e1c6d4ff7.slice/crio-ca1ef02fbe2ed89a4bcb21f0c5a98e0a53cb2a9551f64b1df5b43d5c65a0af67 WatchSource:0}: Error finding container ca1ef02fbe2ed89a4bcb21f0c5a98e0a53cb2a9551f64b1df5b43d5c65a0af67: Status 404 returned error can't find the container with id ca1ef02fbe2ed89a4bcb21f0c5a98e0a53cb2a9551f64b1df5b43d5c65a0af67 Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.133640 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr979\" (UniqueName: \"kubernetes.io/projected/9f18ce1c-261b-4c18-b729-c3d24fceb581-kube-api-access-fr979\") pod \"service-ca-operator-777779d784-9vbnt\" (UID: \"9f18ce1c-261b-4c18-b729-c3d24fceb581\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.146242 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.161628 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.163950 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.663896738 +0000 UTC m=+90.190785487 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.166694 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssn5m\" (UniqueName: \"kubernetes.io/projected/e192bf89-81aa-4141-be73-14cef0f2ba6e-kube-api-access-ssn5m\") pod \"multus-admission-controller-857f4d67dd-224n4\" (UID: \"e192bf89-81aa-4141-be73-14cef0f2ba6e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.168450 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:19 crc kubenswrapper[4747]: W1202 16:44:19.179684 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64fb89ca_43dd_47d7_a8ab_8e5525df5c59.slice/crio-4869c1974ddbe5f3c93ecec1023c6aaaeb9aaff17002c99bb45676496f59f327 WatchSource:0}: Error finding container 4869c1974ddbe5f3c93ecec1023c6aaaeb9aaff17002c99bb45676496f59f327: Status 404 returned error can't find the container with id 4869c1974ddbe5f3c93ecec1023c6aaaeb9aaff17002c99bb45676496f59f327 Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.183097 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2bz9\" (UniqueName: \"kubernetes.io/projected/2079a061-35c0-40b3-8591-53decd25d0bf-kube-api-access-m2bz9\") pod \"apiserver-76f77b778f-dnt2d\" (UID: \"2079a061-35c0-40b3-8591-53decd25d0bf\") " pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.199766 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxqgc\" (UniqueName: \"kubernetes.io/projected/2530933f-b67d-4f7e-93c2-3edf83af285b-kube-api-access-nxqgc\") pod \"collect-profiles-29411550-2s6h7\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.221306 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.222386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6blhw\" (UniqueName: \"kubernetes.io/projected/900274ee-b879-496e-bc26-a3cd59bd6536-kube-api-access-6blhw\") pod \"service-ca-9c57cc56f-gn7ft\" (UID: \"900274ee-b879-496e-bc26-a3cd59bd6536\") " pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.237348 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.237594 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbvn8\" (UniqueName: \"kubernetes.io/projected/ea23bfbb-080b-479c-8971-594045cdc2a4-kube-api-access-cbvn8\") pod \"route-controller-manager-6576b87f9c-8n5g7\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.244202 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.265984 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.266408 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.766388214 +0000 UTC m=+90.293276963 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.267570 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-c2h5g"] Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.270518 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.271546 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkbfv\" (UniqueName: \"kubernetes.io/projected/da1ec67a-b576-4f86-8b98-272f3e4ed165-kube-api-access-tkbfv\") pod \"dns-default-c4zkq\" (UID: \"da1ec67a-b576-4f86-8b98-272f3e4ed165\") " pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.278730 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcgg2\" (UniqueName: \"kubernetes.io/projected/42ae6217-8ecb-40e5-9713-04ef37a93c8d-kube-api-access-wcgg2\") pod \"packageserver-d55dfcdfc-pktz2\" (UID: \"42ae6217-8ecb-40e5-9713-04ef37a93c8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:19 crc kubenswrapper[4747]: W1202 16:44:19.294066 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb83e8c9_a2d3_4bf3_a66f_e6df7670a420.slice/crio-549272a4e6bba69c6c5fa8af1104f0d48dd93677f6c34321cdf1775a62af1a50 WatchSource:0}: Error finding container 549272a4e6bba69c6c5fa8af1104f0d48dd93677f6c34321cdf1775a62af1a50: Status 404 returned error can't find the container with id 549272a4e6bba69c6c5fa8af1104f0d48dd93677f6c34321cdf1775a62af1a50 Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.298513 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c50840fc-2cea-4e15-ab40-f49eb59662e4-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9vtgt\" (UID: \"c50840fc-2cea-4e15-ab40-f49eb59662e4\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.302527 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" event={"ID":"9330ef08-f76f-4166-a6c0-a3275375c9f1","Type":"ContainerStarted","Data":"f506d59dd9ce1e0d4ee8e812a62265c944b9df2b6a553e345bf581dd2ae38d01"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.309163 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" event={"ID":"11745207-557a-47de-b596-fd2e1c6d4ff7","Type":"ContainerStarted","Data":"ca1ef02fbe2ed89a4bcb21f0c5a98e0a53cb2a9551f64b1df5b43d5c65a0af67"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.319838 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" event={"ID":"6068bf0f-7a74-42e0-af1f-8d7d79c174ca","Type":"ContainerStarted","Data":"2fd4eb2016fd8df5f916d592d1a6befeb49bf4c784135fcef7bdef6d13f4b542"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.335582 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjpx6\" (UniqueName: \"kubernetes.io/projected/1f5586e5-c2c7-47f6-9022-004baaa3a53c-kube-api-access-hjpx6\") pod \"ingress-canary-qtjf6\" (UID: \"1f5586e5-c2c7-47f6-9022-004baaa3a53c\") " pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.335819 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.336167 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" event={"ID":"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67","Type":"ContainerStarted","Data":"3099eb42bb2098850abcd3885f7ebebee14f5f17c91dc6a146cec7fb34810032"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.336241 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.341596 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" event={"ID":"64fb89ca-43dd-47d7-a8ab-8e5525df5c59","Type":"ContainerStarted","Data":"4869c1974ddbe5f3c93ecec1023c6aaaeb9aaff17002c99bb45676496f59f327"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.343058 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.343452 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" event={"ID":"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0","Type":"ContainerStarted","Data":"39db7ef6d834e3fc551e72c745bcddaf8a3531094dd1b576f3d9d0b2bc0e1851"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.345532 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cd7fp"] Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.351975 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" event={"ID":"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c","Type":"ContainerStarted","Data":"1fa6e8da2d90363d47ef1ae545d63e6d81c66089820e33800d7f0dd3eb687971"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.353302 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" event={"ID":"36d83461-bc1a-4cff-b763-457f2ecfa536","Type":"ContainerStarted","Data":"befdfca2e4b6cd5e3311818cd70fd5ee3f848b5904a8661f782268e6d845c89e"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.353813 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh8jh\" (UniqueName: \"kubernetes.io/projected/acaec0be-3f32-4a76-9f3c-d291c3ceabea-kube-api-access-sh8jh\") pod \"olm-operator-6b444d44fb-jngq7\" (UID: \"acaec0be-3f32-4a76-9f3c-d291c3ceabea\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.355699 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" event={"ID":"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4","Type":"ContainerStarted","Data":"b4c44d1805c85429dfdf5cf7f9ff6ea4ae0a4352a36130346f0990e7d1500e16"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.355785 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" event={"ID":"0ee24f28-7d17-45f0-9c74-00b2f8b38ed4","Type":"ContainerStarted","Data":"bc7516440d4c23fe7c483b9b49951e322eb940b5caad977a278575530a48ac98"} Dec 
02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.356928 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.358944 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" event={"ID":"7e06e723-6eff-4c1d-ac75-e063f99e039f","Type":"ContainerStarted","Data":"6aee7b49ca78cdf7beeec3a4e71e769421c357453962bb6ae105abe70c7b613b"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.360489 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qmfcd" event={"ID":"04e7d78c-36d3-4997-8ed4-62c494eda7d3","Type":"ContainerStarted","Data":"162488350d74acd9d893b5bd762caa7d50b62df813b6139fed36fb9f213e730f"} Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.364337 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn8rq\" (UniqueName: \"kubernetes.io/projected/8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae-kube-api-access-hn8rq\") pod \"catalog-operator-68c6474976-bd9d4\" (UID: \"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.366437 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.368196 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.868176611 +0000 UTC m=+90.395065360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.372084 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.373978 4747 patch_prober.go:28] interesting pod/console-operator-58897d9998-n2rcw container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/readyz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.374035 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" podUID="0ee24f28-7d17-45f0-9c74-00b2f8b38ed4" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.8:8443/readyz\": dial tcp 10.217.0.8:8443: connect: connection refused" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.379862 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4jzs\" (UniqueName: \"kubernetes.io/projected/e0089ce4-1297-4ffc-8375-bc9c553e0fb7-kube-api-access-j4jzs\") pod \"machine-config-server-npzxm\" (UID: \"e0089ce4-1297-4ffc-8375-bc9c553e0fb7\") " pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.381804 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.389490 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.397818 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.404481 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfrvc\" (UniqueName: \"kubernetes.io/projected/ae95be13-3031-4f2a-8e4f-a6d5742c246c-kube-api-access-lfrvc\") pod \"machine-config-operator-74547568cd-gjmw6\" (UID: \"ae95be13-3031-4f2a-8e4f-a6d5742c246c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.415826 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.420115 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzt8l\" (UniqueName: \"kubernetes.io/projected/48b44473-6607-4bce-8136-3d355e5ff018-kube-api-access-lzt8l\") pod \"csi-hostpathplugin-b79s7\" (UID: \"48b44473-6607-4bce-8136-3d355e5ff018\") " pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.435128 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.459216 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.460943 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-npzxm" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.469036 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d29rx\" (UniqueName: \"kubernetes.io/projected/41942474-c0da-4b4d-8f78-27bca08e7b53-kube-api-access-d29rx\") pod \"etcd-operator-b45778765-lstcr\" (UID: \"41942474-c0da-4b4d-8f78-27bca08e7b53\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.472126 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg9cz\" (UniqueName: \"kubernetes.io/projected/e5cd6f4c-a2dc-476c-a8e6-2913db00b182-kube-api-access-kg9cz\") pod \"package-server-manager-789f6589d5-x5bs9\" (UID: \"e5cd6f4c-a2dc-476c-a8e6-2913db00b182\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.474076 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.474485 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:19.974466711 +0000 UTC m=+90.501355470 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.480321 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.488966 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.504740 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.513587 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.534479 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.550604 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.558618 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qtjf6" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.585253 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.585801 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:20.085776927 +0000 UTC m=+90.612665676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.687319 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.687966 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:20.187949385 +0000 UTC m=+90.714838134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.705124 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.735188 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.786735 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-f27zn"] Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.790837 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.791543 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:20.29151121 +0000 UTC m=+90.818399969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.893030 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.893572 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:20.393546734 +0000 UTC m=+90.920435483 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:19 crc kubenswrapper[4747]: I1202 16:44:19.996111 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:19 crc kubenswrapper[4747]: E1202 16:44:19.997199 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 16:44:20.497173231 +0000 UTC m=+91.024061980 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: W1202 16:44:20.034986 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod581ef685_55a6_489c_ab7d_ac0db896aa30.slice/crio-924c7783f518d0b01ff949b14b0f125202c84bad3493621eb25f4b2d49b24d97 WatchSource:0}: Error finding container 924c7783f518d0b01ff949b14b0f125202c84bad3493621eb25f4b2d49b24d97: Status 404 returned error can't find the container with id 924c7783f518d0b01ff949b14b0f125202c84bad3493621eb25f4b2d49b24d97 Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.097144 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.098641 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.099815 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:20.599798861 +0000 UTC m=+91.126687610 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.119960 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.186646 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-89l9h"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.199664 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.200023 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:20.700001545 +0000 UTC m=+91.226890294 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: W1202 16:44:20.201746 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a928a06_0efd_47fe_b677_26ee96cbd922.slice/crio-a11702b6fe9c684ba8112a399053947b0982040d27d98db80741e0e3aff08ea0 WatchSource:0}: Error finding container a11702b6fe9c684ba8112a399053947b0982040d27d98db80741e0e3aff08ea0: Status 404 returned error can't find the container with id a11702b6fe9c684ba8112a399053947b0982040d27d98db80741e0e3aff08ea0 Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.274700 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.315370 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.315703 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 16:44:20.815672451 +0000 UTC m=+91.342561200 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.319940 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.404177 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qmfcd" event={"ID":"04e7d78c-36d3-4997-8ed4-62c494eda7d3","Type":"ContainerStarted","Data":"766e407a49ba3ec6e5e0116df8df09d1ea875f11a0d453938e72352f4d513763"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.406590 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" event={"ID":"0e56253a-5b24-4cac-8f3f-9b357bc12f82","Type":"ContainerStarted","Data":"f7b8f4996ca1d7b4c8ba111d0c79030fcedd38ef5d597489a211ef90ea5413e7"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.416552 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.418172 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:20.918151067 +0000 UTC m=+91.445039816 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.424245 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" event={"ID":"36d83461-bc1a-4cff-b763-457f2ecfa536","Type":"ContainerStarted","Data":"1ba772fd33992c8b45cb09fa2f6852d8f5f5bb0d58bb5ed25ad3088ed87a1e72"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.426371 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" event={"ID":"7e06e723-6eff-4c1d-ac75-e063f99e039f","Type":"ContainerStarted","Data":"0cd78abab9bead2db917e8c6d56b2070b096174562769270d0f529fbfa7d2fd3"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.450860 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.459343 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:20 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:20 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:20 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.459409 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.486624 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" event={"ID":"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c","Type":"ContainerStarted","Data":"99ab985817a2e55b570b57f9e4b3f767fa77c96b7fbf8715c99fd69b3acc7d4b"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.519036 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.520677 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.020656703 +0000 UTC m=+91.547545452 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.556613 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-npzxm" event={"ID":"e0089ce4-1297-4ffc-8375-bc9c553e0fb7","Type":"ContainerStarted","Data":"d12594ac1d59d5d8973815ed0f4d829744d5ed05411f5adc8e6b27557a17f3c0"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.591975 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" event={"ID":"64fb89ca-43dd-47d7-a8ab-8e5525df5c59","Type":"ContainerStarted","Data":"65a28ab1dd76bf5c3950f609f3f56c80a9c93d62f74881c2f3ea5f567d44f42c"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.607929 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-f27zn" event={"ID":"581ef685-55a6-489c-ab7d-ac0db896aa30","Type":"ContainerStarted","Data":"924c7783f518d0b01ff949b14b0f125202c84bad3493621eb25f4b2d49b24d97"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.636624 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.643991 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.143962007 +0000 UTC m=+91.670850766 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.652102 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" event={"ID":"db83e8c9-a2d3-4bf3-a66f-e6df7670a420","Type":"ContainerStarted","Data":"fe2592c7636e0b6e368f980122134cbeeff0c4efa2686e90bfceebd0e391a587"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.652346 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" event={"ID":"db83e8c9-a2d3-4bf3-a66f-e6df7670a420","Type":"ContainerStarted","Data":"549272a4e6bba69c6c5fa8af1104f0d48dd93677f6c34321cdf1775a62af1a50"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.744382 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.744919 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.244886311 +0000 UTC m=+91.771775070 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.797404 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" podStartSLOduration=68.797374323 podStartE2EDuration="1m8.797374323s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:20.764413064 +0000 UTC m=+91.291301813" watchObservedRunningTime="2025-12-02 16:44:20.797374323 +0000 UTC m=+91.324263072" Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.801377 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.846001 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" event={"ID":"9330ef08-f76f-4166-a6c0-a3275375c9f1","Type":"ContainerStarted","Data":"d93b1e5e4beedcbfb6b72ba8ad29594b1b236e4f9e5999ee288a23a345575646"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.847844 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.848466 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.348431386 +0000 UTC m=+91.875320135 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.849962 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" event={"ID":"11745207-557a-47de-b596-fd2e1c6d4ff7","Type":"ContainerStarted","Data":"d4bb71329d22cf1fe6060a116cc1b16dc8a523abec4444aee53f0e71bbff1969"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.863098 4747 generic.go:334] "Generic (PLEG): container finished" podID="6068bf0f-7a74-42e0-af1f-8d7d79c174ca" containerID="e1839f39da496817a51f927ca4327717432775580e18c905a499e297e68a3e1e" exitCode=0 Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.863276 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" event={"ID":"6068bf0f-7a74-42e0-af1f-8d7d79c174ca","Type":"ContainerDied","Data":"e1839f39da496817a51f927ca4327717432775580e18c905a499e297e68a3e1e"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.868361 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" event={"ID":"4a1dfdd5-a168-4531-a4a2-2ef4b2115cd0","Type":"ContainerStarted","Data":"15abe371866367f6fde63bfe309fdbd3f17403841e100a4ab72fcf6d8f83581f"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.891256 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" event={"ID":"0a928a06-0efd-47fe-b677-26ee96cbd922","Type":"ContainerStarted","Data":"a11702b6fe9c684ba8112a399053947b0982040d27d98db80741e0e3aff08ea0"} Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.917641 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-8w6hw"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.954808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:20 crc kubenswrapper[4747]: E1202 16:44:20.957770 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.457712027 +0000 UTC m=+91.984600776 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.968005 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.977802 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt"] Dec 02 16:44:20 crc kubenswrapper[4747]: I1202 16:44:20.990571 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4"] Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.057544 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.057728 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.557693055 +0000 UTC m=+92.084581814 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.058154 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.060565 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.560552283 +0000 UTC m=+92.087441032 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.095403 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" podStartSLOduration=69.095357673 podStartE2EDuration="1m9.095357673s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:21.091513728 +0000 UTC m=+91.618402477" watchObservedRunningTime="2025-12-02 16:44:21.095357673 +0000 UTC m=+91.622246422" Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.159194 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.159731 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.659712628 +0000 UTC m=+92.186601387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.196190 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-n2rcw" Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.263151 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.263798 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.763786218 +0000 UTC m=+92.290674967 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.371335 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.371863 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.871838426 +0000 UTC m=+92.398727175 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.453459 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-qmfcd" podStartSLOduration=69.453438312 podStartE2EDuration="1m9.453438312s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:21.451313064 +0000 UTC m=+91.978201813" watchObservedRunningTime="2025-12-02 16:44:21.453438312 +0000 UTC m=+91.980327061" Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.456796 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:21 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:21 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:21 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.456863 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.472656 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.473077 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:21.973058997 +0000 UTC m=+92.499947746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.482051 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4"] Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.573215 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.573987 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.07396992 +0000 UTC m=+92.600858669 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.573896 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9"] Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.698811 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.699549 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.19930548 +0000 UTC m=+92.726194229 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.700917 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7v6x8" podStartSLOduration=69.700863352 podStartE2EDuration="1m9.700863352s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:21.599812526 +0000 UTC m=+92.126701275" watchObservedRunningTime="2025-12-02 16:44:21.700863352 +0000 UTC m=+92.227752101" Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.721863 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7"] Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.799625 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.819549 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.31952099 +0000 UTC m=+92.846409739 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.821900 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-gj7zq" podStartSLOduration=70.821881424 podStartE2EDuration="1m10.821881424s" podCreationTimestamp="2025-12-02 16:43:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:21.790664112 +0000 UTC m=+92.317552871" watchObservedRunningTime="2025-12-02 16:44:21.821881424 +0000 UTC m=+92.348770173" Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.832361 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-224n4"] Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.847002 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-dnt2d"] Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.915069 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:21 crc kubenswrapper[4747]: E1202 16:44:21.915489 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.415473938 +0000 UTC m=+92.942362687 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.970787 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" event={"ID":"72d97894-0a82-427f-8376-bea96de36324","Type":"ContainerStarted","Data":"037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3"} Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.971219 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" event={"ID":"72d97894-0a82-427f-8376-bea96de36324","Type":"ContainerStarted","Data":"3fdc9f9ad6f926ba1a2f5c0d600413d5f11fd12dca7e410a252619c0807f3ab8"} Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.974025 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.988846 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" event={"ID":"b21e5fdd-5d2a-462d-975a-311b237765aa","Type":"ContainerStarted","Data":"54e4b3b71e043e108b9f33b788b95a12787255caf3a2be1e8b40624e4267f97d"} Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.996115 4747 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-89l9h container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Dec 02 16:44:21 crc kubenswrapper[4747]: I1202 16:44:21.996196 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" podUID="72d97894-0a82-427f-8376-bea96de36324" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.019827 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.020233 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.520202635 +0000 UTC m=+93.047091384 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.020335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.020753 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.52074488 +0000 UTC m=+93.047633629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.052340 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" event={"ID":"2f1dd59a-4931-4cc0-8105-f036e8e2f72f","Type":"ContainerStarted","Data":"9d466151a203a3c0089592dc56ed07c46fe3d4801441bd7b3b02d8139608cbd7"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.064979 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" event={"ID":"ea23bfbb-080b-479c-8971-594045cdc2a4","Type":"ContainerStarted","Data":"3375b4576e8bbcc9d2267ca0bf6010c60441f96040c97347dd2fb37e06007bc5"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.066258 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" event={"ID":"6031bada-c419-487a-9e05-b52277b916b5","Type":"ContainerStarted","Data":"581460bffaf52fb52a00ab67dc792f2561f10589d42e8d42584c09314cd0abf2"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.091517 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vntfv" podStartSLOduration=71.09149526 podStartE2EDuration="1m11.09149526s" podCreationTimestamp="2025-12-02 16:43:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:22.082116614 +0000 UTC m=+92.609005373" watchObservedRunningTime="2025-12-02 16:44:22.09149526 +0000 UTC m=+92.618384009" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.091635 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" event={"ID":"d7899a59-a928-4bd1-895b-b10de5439051","Type":"ContainerStarted","Data":"b043186cfa7a88700a4aa396de34b5bfa92f38ccd5dd8ff81e01ba6e1118b459"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.094311 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" event={"ID":"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8","Type":"ContainerStarted","Data":"ba69c7c7fb13520aade9659b950b96eed758cbdc60e29dc45e61ae67229c40c4"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.095197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8w6hw" event={"ID":"fd81d86e-f692-4a5d-885f-9c37fa608b4f","Type":"ContainerStarted","Data":"a89bac04ade08d36ce9cbdd3f06311a5b912657e1d2be08bc1d0def204601206"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.096000 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vpcns" podStartSLOduration=70.095984952 podStartE2EDuration="1m10.095984952s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:21.914238584 +0000 UTC m=+92.441127333" watchObservedRunningTime="2025-12-02 16:44:22.095984952 +0000 UTC m=+92.622873711" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.096054 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" event={"ID":"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae","Type":"ContainerStarted","Data":"947dd4bddf99e2a08d171f17e84528e4a9d3d97c860349d3f18f60fb08fafe5e"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.099895 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" event={"ID":"15099176-32c6-403e-94bc-9a5e1fb8ae9d","Type":"ContainerStarted","Data":"f7dfb510f1e1d8611039554684799d40e8c5da4823cfbc777be6f8e9d0586f6d"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.108956 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-f27zn" event={"ID":"581ef685-55a6-489c-ab7d-ac0db896aa30","Type":"ContainerStarted","Data":"e6e65a23d10fb4592bcd31a974e7ffbe218528c7e1407494b09a9fba673879b5"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.109577 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-f27zn" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.112052 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" event={"ID":"9f18ce1c-261b-4c18-b729-c3d24fceb581","Type":"ContainerStarted","Data":"c29d1d7642b7ce56221d7cf8d4b4364b8bb59cf0a939f3ec00d9957ed1dcb8fb"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.127044 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-f27zn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.127086 4747 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-f27zn" podUID="581ef685-55a6-489c-ab7d-ac0db896aa30" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.127576 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.128595 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.628580772 +0000 UTC m=+93.155469521 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.146254 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" event={"ID":"9330ef08-f76f-4166-a6c0-a3275375c9f1","Type":"ContainerStarted","Data":"79fa583af7bbbf9db066b82102731c85dbe2431e85993cdd4c7b7d0dfbd5fa8b"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.167025 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-c4zkq"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.190556 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" podStartSLOduration=70.190524762 podStartE2EDuration="1m10.190524762s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:22.189573786 +0000 UTC m=+92.716462535" watchObservedRunningTime="2025-12-02 16:44:22.190524762 +0000 UTC m=+92.717413521" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.209218 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-npzxm" event={"ID":"e0089ce4-1297-4ffc-8375-bc9c553e0fb7","Type":"ContainerStarted","Data":"e31157b1ab0e9523e52f5d7faef71ff82237606796fd43a216db25845138b246"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.232877 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.234779 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.734766329 +0000 UTC m=+93.261655078 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.268880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" event={"ID":"11745207-557a-47de-b596-fd2e1c6d4ff7","Type":"ContainerStarted","Data":"265e4b20fdd27dc3e8f9574fd50f64f6c6183bf201a8ff3a081cabe4c01d532f"} Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.337244 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.337639 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.837624934 +0000 UTC m=+93.364513683 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.349282 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" podStartSLOduration=70.349251671 podStartE2EDuration="1m10.349251671s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:22.326068299 +0000 UTC m=+92.852957038" watchObservedRunningTime="2025-12-02 16:44:22.349251671 +0000 UTC m=+92.876140420" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.352092 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.447560 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.448179 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-dn6td" podStartSLOduration=70.448149609 podStartE2EDuration="1m10.448149609s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:22.372321621 +0000 UTC m=+92.899210390" watchObservedRunningTime="2025-12-02 16:44:22.448149609 +0000 UTC m=+92.975038358" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.448845 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qtjf6"] Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.466789 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:22.966771277 +0000 UTC m=+93.493660026 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.533310 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-npzxm" podStartSLOduration=6.533288562 podStartE2EDuration="6.533288562s" podCreationTimestamp="2025-12-02 16:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:22.449560138 +0000 UTC m=+92.976448897" watchObservedRunningTime="2025-12-02 16:44:22.533288562 +0000 UTC m=+93.060177311" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.535610 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.551982 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.552455 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.052433464 +0000 UTC m=+93.579322213 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.559716 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:22 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:22 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:22 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.559780 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.593266 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gn7ft"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.595156 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.612104 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-b79s7"] Dec 02 16:44:22 crc kubenswrapper[4747]: W1202 16:44:22.635801 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f5586e5_c2c7_47f6_9022_004baaa3a53c.slice/crio-e7c9bf73830ed3f83a2670511e754161b254929b44facff5045c985668adebb8 WatchSource:0}: Error finding container e7c9bf73830ed3f83a2670511e754161b254929b44facff5045c985668adebb8: Status 404 returned error can't find the container with id e7c9bf73830ed3f83a2670511e754161b254929b44facff5045c985668adebb8 Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.645051 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.653817 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.653998 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.654052 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-f27zn" podStartSLOduration=70.654036286 podStartE2EDuration="1m10.654036286s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:22.516279618 +0000 UTC m=+93.043168367" watchObservedRunningTime="2025-12-02 16:44:22.654036286 +0000 UTC m=+93.180925035" Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.654422 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.154405166 +0000 UTC m=+93.681293915 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.676946 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lstcr"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.684477 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-zsp6q" podStartSLOduration=70.684445366 podStartE2EDuration="1m10.684445366s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:22.552816025 +0000 UTC m=+93.079704774" watchObservedRunningTime="2025-12-02 16:44:22.684445366 +0000 UTC m=+93.211334115" Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.689715 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7"] Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.760085 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.760510 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.260493021 +0000 UTC m=+93.787381770 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: W1202 16:44:22.762191 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42ae6217_8ecb_40e5_9713_04ef37a93c8d.slice/crio-81c77a5dae63d5d5b6ff27306778625801df5ec1c381cbc1da8ab308f8bd90c5 WatchSource:0}: Error finding container 81c77a5dae63d5d5b6ff27306778625801df5ec1c381cbc1da8ab308f8bd90c5: Status 404 returned error can't find the container with id 81c77a5dae63d5d5b6ff27306778625801df5ec1c381cbc1da8ab308f8bd90c5 Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.861517 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.862339 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.362322839 +0000 UTC m=+93.889211588 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:22 crc kubenswrapper[4747]: I1202 16:44:22.968987 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:22 crc kubenswrapper[4747]: E1202 16:44:22.969556 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.469532594 +0000 UTC m=+93.996421353 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.070802 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.071626 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.571606969 +0000 UTC m=+94.098495718 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.173598 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.174173 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.674150806 +0000 UTC m=+94.201039555 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.279841 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.280673 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.780647812 +0000 UTC m=+94.307536561 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.301641 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" event={"ID":"15099176-32c6-403e-94bc-9a5e1fb8ae9d","Type":"ContainerStarted","Data":"56f815e63533f3360bcde5143640450c859098defd21e1c28b2ccac2f0f70bce"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.314703 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" event={"ID":"acaec0be-3f32-4a76-9f3c-d291c3ceabea","Type":"ContainerStarted","Data":"5fb1c03932eb911b72ddc076b60d86b341af38024f8ba1394e7de1441e66d6bc"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.330083 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" event={"ID":"a6c87ccc-eaf9-46fb-a066-a9a145bde3c8","Type":"ContainerStarted","Data":"9c228b95846e64b8cdd844c8d72e1f3d81f1eeb6e84e765c44f101ad3ba622c0"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.337132 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-bfnkp" podStartSLOduration=71.337108812 podStartE2EDuration="1m11.337108812s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.336171567 +0000 UTC m=+93.863060326" watchObservedRunningTime="2025-12-02 16:44:23.337108812 +0000 UTC m=+93.863997561" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.367762 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" event={"ID":"0e56253a-5b24-4cac-8f3f-9b357bc12f82","Type":"ContainerStarted","Data":"7c1b109f9951a30d908b8b38a55cc7416cbc5f10a634339b761f94d197730771"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.368625 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.385225 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.385546 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:23.885530374 +0000 UTC m=+94.412419123 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.398153 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" event={"ID":"c50840fc-2cea-4e15-ab40-f49eb59662e4","Type":"ContainerStarted","Data":"b4dea58c611fb53c9981a773480426d88dd69fdfdc14804e84b6e4dc0cc062dd"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.408157 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" event={"ID":"2f1dd59a-4931-4cc0-8105-f036e8e2f72f","Type":"ContainerStarted","Data":"8a1cace554dcecdfdf984fd041b9e37f9b812f0db3e9d382549680c90debfac3"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.424405 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-rxbcg" podStartSLOduration=71.424375433 podStartE2EDuration="1m11.424375433s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.374618346 +0000 UTC m=+93.901507095" watchObservedRunningTime="2025-12-02 16:44:23.424375433 +0000 UTC m=+93.951264182" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.442341 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" event={"ID":"e5cd6f4c-a2dc-476c-a8e6-2913db00b182","Type":"ContainerStarted","Data":"014b6ec6f30c64bb94fdba54967eda87cff459b881ad2990b44caeaed4a7bfc6"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.442405 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" 
event={"ID":"e5cd6f4c-a2dc-476c-a8e6-2913db00b182","Type":"ContainerStarted","Data":"838e9659c20c0a13fe49763166b33c80e5b6d6e4376608b0bb553fcebc370438"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.474559 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:23 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:23 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:23 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.474630 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.475529 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" podStartSLOduration=72.475517729 podStartE2EDuration="1m12.475517729s" podCreationTimestamp="2025-12-02 16:43:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.425800932 +0000 UTC m=+93.952689701" watchObservedRunningTime="2025-12-02 16:44:23.475517729 +0000 UTC m=+94.002406478" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.483327 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-2lxh4" podStartSLOduration=71.483285621 podStartE2EDuration="1m11.483285621s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.475159969 +0000 UTC m=+94.002048718" watchObservedRunningTime="2025-12-02 16:44:23.483285621 +0000 UTC m=+94.010174380" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.494846 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" event={"ID":"ea23bfbb-080b-479c-8971-594045cdc2a4","Type":"ContainerStarted","Data":"59fcd9d532bdebdfba5c40945814218e3549ebaead87e49be862a7f95f2abc4a"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.541209 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.517210 4747 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-8n5g7 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.541365 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" podUID="ea23bfbb-080b-479c-8971-594045cdc2a4" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 02 
16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.543183 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.549133 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" podStartSLOduration=71.549109136 podStartE2EDuration="1m11.549109136s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.549002244 +0000 UTC m=+94.075890993" watchObservedRunningTime="2025-12-02 16:44:23.549109136 +0000 UTC m=+94.075997895" Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.549349 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.049332673 +0000 UTC m=+94.576221422 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.564015 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" event={"ID":"0a928a06-0efd-47fe-b677-26ee96cbd922","Type":"ContainerStarted","Data":"0486b76201ce27d0c4abc7c535def42193ba69b05bef158c1c788aba65eb79bb"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.564076 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" event={"ID":"0a928a06-0efd-47fe-b677-26ee96cbd922","Type":"ContainerStarted","Data":"9bb3cd490f799663e6da75a34309654ca95eefca05224fdd85e3e1fcbb6eeabe"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.594616 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjl6z" podStartSLOduration=71.594598687 podStartE2EDuration="1m11.594598687s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.592726106 +0000 UTC m=+94.119614855" watchObservedRunningTime="2025-12-02 16:44:23.594598687 +0000 UTC m=+94.121487436" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.602190 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" event={"ID":"7b62bee3-cf8a-46b1-a0d4-fc040830cd5c","Type":"ContainerStarted","Data":"250a914a7d64e94a35aa34742942b4a24e50210f08d151f3e98c4b270b8921e6"} Dec 02 16:44:23 crc 
kubenswrapper[4747]: I1202 16:44:23.610398 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" event={"ID":"900274ee-b879-496e-bc26-a3cd59bd6536","Type":"ContainerStarted","Data":"3f56124d1778f9c278bd8e34552eb7a94a58006499cfcaf2429e7a82d90d5d84"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.616831 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" event={"ID":"6031bada-c419-487a-9e05-b52277b916b5","Type":"ContainerStarted","Data":"f8d367c4dbd006007c49b349da2e4e832c444eb26f41d3ddf2c2ba68f016a481"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.655326 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.658372 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.158341766 +0000 UTC m=+94.685230515 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.660325 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8w6hw" event={"ID":"fd81d86e-f692-4a5d-885f-9c37fa608b4f","Type":"ContainerStarted","Data":"6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.697448 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s2lq2" podStartSLOduration=72.697425413 podStartE2EDuration="1m12.697425413s" podCreationTimestamp="2025-12-02 16:43:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.654231354 +0000 UTC m=+94.181120103" watchObservedRunningTime="2025-12-02 16:44:23.697425413 +0000 UTC m=+94.224314162" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.699663 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" event={"ID":"ee08e880-d792-46ca-927a-16e6c078726e","Type":"ContainerStarted","Data":"5445e8288567f838c2eddd37f5e6254b3b8841822600577187484680f87ea69f"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.723539 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" event={"ID":"9f18ce1c-261b-4c18-b729-c3d24fceb581","Type":"ContainerStarted","Data":"c95e7ddfbde73d5c6d83cf56c73a66a526c8e08e1db0d449a75b9ca2d15e2fab"} Dec 02 16:44:23 
crc kubenswrapper[4747]: I1202 16:44:23.744882 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-qwrvt" podStartSLOduration=71.744857707 podStartE2EDuration="1m11.744857707s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.699313874 +0000 UTC m=+94.226202623" watchObservedRunningTime="2025-12-02 16:44:23.744857707 +0000 UTC m=+94.271746456" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.757678 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" event={"ID":"ae95be13-3031-4f2a-8e4f-a6d5742c246c","Type":"ContainerStarted","Data":"f71e4f12e552cf80a5a29501504e018cb296efe51cf87a710856a1c1305ac553"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.758706 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.760976 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.260959506 +0000 UTC m=+94.787848255 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.791079 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-8w6hw" podStartSLOduration=71.791052927 podStartE2EDuration="1m11.791052927s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.748665671 +0000 UTC m=+94.275554430" watchObservedRunningTime="2025-12-02 16:44:23.791052927 +0000 UTC m=+94.317941676" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.860036 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.861724 4747 generic.go:334] "Generic (PLEG): container finished" podID="b21e5fdd-5d2a-462d-975a-311b237765aa" containerID="6b4995f83c14a82bb4bfb33ef384dfcb0b01eba1dd9be26c0428065b28eee432" exitCode=0 Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.862313 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" event={"ID":"b21e5fdd-5d2a-462d-975a-311b237765aa","Type":"ContainerDied","Data":"6b4995f83c14a82bb4bfb33ef384dfcb0b01eba1dd9be26c0428065b28eee432"} Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.862806 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.362770404 +0000 UTC m=+94.889659163 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.896525 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-9vbnt" podStartSLOduration=71.896509974 podStartE2EDuration="1m11.896509974s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.793061782 +0000 UTC m=+94.319950531" watchObservedRunningTime="2025-12-02 16:44:23.896509974 +0000 UTC m=+94.423398723" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.898614 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" event={"ID":"e192bf89-81aa-4141-be73-14cef0f2ba6e","Type":"ContainerStarted","Data":"aa6fd69cf768316e377234257d9da6627e11c6a34350873fa0c206a109a4055c"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.898648 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" event={"ID":"e192bf89-81aa-4141-be73-14cef0f2ba6e","Type":"ContainerStarted","Data":"24b0af5f78b0fcb01050e5ebf659213d30986dea37c4ae455456f69ef2ebeec6"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.899907 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" event={"ID":"42ae6217-8ecb-40e5-9713-04ef37a93c8d","Type":"ContainerStarted","Data":"81c77a5dae63d5d5b6ff27306778625801df5ec1c381cbc1da8ab308f8bd90c5"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.933999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" event={"ID":"41942474-c0da-4b4d-8f78-27bca08e7b53","Type":"ContainerStarted","Data":"36a877ee43707cb402582a7b28637a784fef82eaeb190960bada0a102b403375"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.951645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" event={"ID":"8c5c00b0-33b4-4cd2-b427-f3b1d38c74ae","Type":"ContainerStarted","Data":"38c5067429a3cf8721f0f1a12d78e525f5da8d7dc0bc6306da03a014cb435933"} Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.952605 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.962857 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:23 crc kubenswrapper[4747]: E1202 16:44:23.964686 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.464668174 +0000 UTC m=+94.991556923 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:23 crc kubenswrapper[4747]: I1202 16:44:23.976839 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" event={"ID":"48b44473-6607-4bce-8136-3d355e5ff018","Type":"ContainerStarted","Data":"45872f8c22fb09e5e096e57a86f3f18ba6d9b9ddfa4c00d80edcc9e85e827ed8"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.017925 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" event={"ID":"d7899a59-a928-4bd1-895b-b10de5439051","Type":"ContainerStarted","Data":"a70b7e44a74de40b3b37b798bf2d631adda93e99df3e63ee7a43a32b4410a028"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.018209 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" event={"ID":"d7899a59-a928-4bd1-895b-b10de5439051","Type":"ContainerStarted","Data":"dae8cbe42bf26bfeeb1d7f79c4250264708b7788accf120267209b8264bcddb1"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.029503 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c4zkq" event={"ID":"da1ec67a-b576-4f86-8b98-272f3e4ed165","Type":"ContainerStarted","Data":"e578d974494b7fb3b19f8c7d582bd6cb64fe6ca0585f63e50550cf8ea99e24ba"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.031265 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qtjf6" event={"ID":"1f5586e5-c2c7-47f6-9022-004baaa3a53c","Type":"ContainerStarted","Data":"e7c9bf73830ed3f83a2670511e754161b254929b44facff5045c985668adebb8"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.033028 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.034578 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" event={"ID":"2530933f-b67d-4f7e-93c2-3edf83af285b","Type":"ContainerStarted","Data":"6b3cf2c24302386fdd5ab194c606c8ba262d9031897b544c2e928f94df07959d"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.050172 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bd9d4" podStartSLOduration=72.050144866 podStartE2EDuration="1m12.050144866s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:23.997644624 +0000 UTC m=+94.524533373" watchObservedRunningTime="2025-12-02 16:44:24.050144866 +0000 UTC m=+94.577033615" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 
16:44:24.055386 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vq4z9" podStartSLOduration=72.055364738 podStartE2EDuration="1m12.055364738s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:24.043891485 +0000 UTC m=+94.570780234" watchObservedRunningTime="2025-12-02 16:44:24.055364738 +0000 UTC m=+94.582253487" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.065249 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.066647 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.566626276 +0000 UTC m=+95.093515025 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.069157 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-c2h5g" event={"ID":"db83e8c9-a2d3-4bf3-a66f-e6df7670a420","Type":"ContainerStarted","Data":"98cbc3025f788aeea6c262a87a5478ec20509c8096f0a43f6694f2c7a6105864"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.071845 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" event={"ID":"6068bf0f-7a74-42e0-af1f-8d7d79c174ca","Type":"ContainerStarted","Data":"82925351ae374cd36eb58b6e19a4b22c633ec76f0b59e3444d8645f376933b15"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.108566 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" podStartSLOduration=72.108539669 podStartE2EDuration="1m12.108539669s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:24.106156344 +0000 UTC m=+94.633045093" watchObservedRunningTime="2025-12-02 16:44:24.108539669 +0000 UTC m=+94.635428418" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.119427 4747 generic.go:334] "Generic (PLEG): container finished" podID="2079a061-35c0-40b3-8591-53decd25d0bf" containerID="cbe3f5f3f7fde956d757bbf18d2eef1645de2e1ef9129679d9e5b28165c7980f" exitCode=0 Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.120943 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" 
event={"ID":"2079a061-35c0-40b3-8591-53decd25d0bf","Type":"ContainerDied","Data":"cbe3f5f3f7fde956d757bbf18d2eef1645de2e1ef9129679d9e5b28165c7980f"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.121006 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" event={"ID":"2079a061-35c0-40b3-8591-53decd25d0bf","Type":"ContainerStarted","Data":"42f22d79ab20c34213f89a63281400d360cf870824b69c5d66574cdfa4cb8348"} Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.133599 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-f27zn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.133650 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-f27zn" podUID="581ef685-55a6-489c-ab7d-ac0db896aa30" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.133732 4747 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-89l9h container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" start-of-body= Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.133750 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" podUID="72d97894-0a82-427f-8376-bea96de36324" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.36:8080/healthz\": dial tcp 10.217.0.36:8080: connect: connection refused" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.168645 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.171017 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.670997113 +0000 UTC m=+95.197885932 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.222536 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr" podStartSLOduration=72.222509738 podStartE2EDuration="1m12.222509738s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:24.169639686 +0000 UTC m=+94.696528435" watchObservedRunningTime="2025-12-02 16:44:24.222509738 +0000 UTC m=+94.749398487" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.269488 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.271965 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.771899486 +0000 UTC m=+95.298788395 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.322658 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.376221 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.376723 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.876708805 +0000 UTC m=+95.403597554 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.458678 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:24 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:24 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:24 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.458749 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.481082 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.483685 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.983645163 +0000 UTC m=+95.510533912 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.488316 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.488695 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:24.98868137 +0000 UTC m=+95.515570109 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.595169 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.595608 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.095587897 +0000 UTC m=+95.622476646 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.698150 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.698569 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.198555936 +0000 UTC m=+95.725444685 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.799149 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.802332 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.302298797 +0000 UTC m=+95.829187546 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:24 crc kubenswrapper[4747]: I1202 16:44:24.904880 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:24 crc kubenswrapper[4747]: E1202 16:44:24.905702 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.405687407 +0000 UTC m=+95.932576156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.005959 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.006091 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.506073626 +0000 UTC m=+96.032962365 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.006418 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.006987 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.5069643 +0000 UTC m=+96.033853109 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.107795 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.107932 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.607913384 +0000 UTC m=+96.134802133 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.108111 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.108661 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.608638244 +0000 UTC m=+96.135527053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.170930 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" event={"ID":"2530933f-b67d-4f7e-93c2-3edf83af285b","Type":"ContainerStarted","Data":"ba438233e8df9edb1202f44f7b3e1ec5dfa47560bef619deb81b17a4767a4968"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.199190 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" event={"ID":"e192bf89-81aa-4141-be73-14cef0f2ba6e","Type":"ContainerStarted","Data":"a15a2e7d4969194021eeee130267543cd6aad246959dfd70161dc4089ce04b8c"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.210863 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.211331 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.711311165 +0000 UTC m=+96.238199924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.211922 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" event={"ID":"48b44473-6607-4bce-8136-3d355e5ff018","Type":"ContainerStarted","Data":"112dbdd281b91b0cb4250877a71b48d0e2fb1b91c89a5df28a9333c5dce06d71"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.223892 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-224n4" podStartSLOduration=73.223871078 podStartE2EDuration="1m13.223871078s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.222366097 +0000 UTC m=+95.749254846" watchObservedRunningTime="2025-12-02 16:44:25.223871078 +0000 UTC m=+95.750759827"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.234821 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c4zkq" event={"ID":"da1ec67a-b576-4f86-8b98-272f3e4ed165","Type":"ContainerStarted","Data":"b46c142af07be7937e779e5b04923e32cc02d848f4d4de7dbe70a5f85612b4be"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.234891 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-c4zkq" event={"ID":"da1ec67a-b576-4f86-8b98-272f3e4ed165","Type":"ContainerStarted","Data":"e359b9a39eb5ed9c704f86c6ade150d674e3537d8d348d2576643f1813df51eb"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.235838 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-c4zkq"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.253703 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" event={"ID":"acaec0be-3f32-4a76-9f3c-d291c3ceabea","Type":"ContainerStarted","Data":"e3dedaf58ac07b7069b7b9f439bc40dc3b26b4b65f4caa2aadfcd75942aa6966"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.255259 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.266715 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qtjf6" event={"ID":"1f5586e5-c2c7-47f6-9022-004baaa3a53c","Type":"ContainerStarted","Data":"16954a541d13e0e065404741bebf00c4712c48e34f002a915c71a023af27435d"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.303410 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-c4zkq" podStartSLOduration=9.303384337 podStartE2EDuration="9.303384337s" podCreationTimestamp="2025-12-02 16:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.266504981 +0000 UTC m=+95.793393730" watchObservedRunningTime="2025-12-02 16:44:25.303384337 +0000 UTC m=+95.830273086"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.308997 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.310590 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" event={"ID":"b21e5fdd-5d2a-462d-975a-311b237765aa","Type":"ContainerStarted","Data":"8155fdc0661e5d585f2e178affb747ac917f01eb9836b2f6ec029db0a451a9a6"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.312371 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.313816 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn"
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.315050 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.815027755 +0000 UTC m=+96.341916664 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.324162 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" event={"ID":"ee08e880-d792-46ca-927a-16e6c078726e","Type":"ContainerStarted","Data":"090beafa94211b7f5812be9994303908f50f12575b461b28e1df339d44dc832e"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.324232 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" event={"ID":"ee08e880-d792-46ca-927a-16e6c078726e","Type":"ContainerStarted","Data":"3797b95719da000567667384557ef9cf8aa7e86e370da2a08949cbda930ca0fa"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.338077 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jngq7" podStartSLOduration=73.338056563 podStartE2EDuration="1m13.338056563s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.30823403 +0000 UTC m=+95.835122799" watchObservedRunningTime="2025-12-02 16:44:25.338056563 +0000 UTC m=+95.864945312"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.339124 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-qtjf6" podStartSLOduration=9.339115632 podStartE2EDuration="9.339115632s" podCreationTimestamp="2025-12-02 16:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.335994977 +0000 UTC m=+95.862883736" watchObservedRunningTime="2025-12-02 16:44:25.339115632 +0000 UTC m=+95.866004381"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.359380 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" event={"ID":"c50840fc-2cea-4e15-ab40-f49eb59662e4","Type":"ContainerStarted","Data":"3220c1ad95a38a49c98601d3e8c144fced81ed8aa5eabbc048bd3e778104ad6e"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.378280 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" event={"ID":"42ae6217-8ecb-40e5-9713-04ef37a93c8d","Type":"ContainerStarted","Data":"f238e82e6ee56cc8b12bfe2f199ffa13c5e4c90768e20faf1f441a3127f72e0d"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.380427 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.389488 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" event={"ID":"41942474-c0da-4b4d-8f78-27bca08e7b53","Type":"ContainerStarted","Data":"2bd4fa6f80bb31960b6e1070e76f567b107667ab73bf0cba12557045481fcac9"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.407156 4747 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-pktz2 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:5443/healthz\": dial tcp 10.217.0.41:5443: connect: connection refused" start-of-body=
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.407240 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" podUID="42ae6217-8ecb-40e5-9713-04ef37a93c8d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.41:5443/healthz\": dial tcp 10.217.0.41:5443: connect: connection refused"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.416001 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.416524 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:25.916475953 +0000 UTC m=+96.443364712 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.430532 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" event={"ID":"ae95be13-3031-4f2a-8e4f-a6d5742c246c","Type":"ContainerStarted","Data":"362fa1c2ceab3fab33ae956d3c766c6279899f38005d5321b557f6ed22ab779b"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.430599 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" event={"ID":"ae95be13-3031-4f2a-8e4f-a6d5742c246c","Type":"ContainerStarted","Data":"2a98af236415878462e7c28763011f6e6db4644f60f176b96e938d0f3fe65449"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.442459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" event={"ID":"900274ee-b879-496e-bc26-a3cd59bd6536","Type":"ContainerStarted","Data":"97bd19fb9735bbeea36130ee7098d1a0b6bf93293f59d341f5120a9f0504bdd0"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.444553 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-d4mjf" podStartSLOduration=73.444538448 podStartE2EDuration="1m13.444538448s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.386175166 +0000 UTC m=+95.913063915" watchObservedRunningTime="2025-12-02 16:44:25.444538448 +0000 UTC m=+95.971427197"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.465748 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" event={"ID":"e5cd6f4c-a2dc-476c-a8e6-2913db00b182","Type":"ContainerStarted","Data":"126727840ccf66d778028e917dadd6880bf799becda532ec1ef7e8b215279d08"}
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.465817 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.467599 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vt692"]
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.467690 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 16:44:25 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld
Dec 02 16:44:25 crc kubenswrapper[4747]: [+]process-running ok
Dec 02 16:44:25 crc kubenswrapper[4747]: healthz check failed
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.467736 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.469886 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.488504 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.536087 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.559189 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.059165196 +0000 UTC m=+96.586053945 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.591247 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.641953 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.642464 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.142439618 +0000 UTC m=+96.669328367 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.642585 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-utilities\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.642668 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqd6r\" (UniqueName: \"kubernetes.io/projected/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-kube-api-access-hqd6r\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.642726 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-catalog-content\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.647187 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vt692"]
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.650368 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" podStartSLOduration=73.650335263 podStartE2EDuration="1m13.650335263s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.533039033 +0000 UTC m=+96.059927782" watchObservedRunningTime="2025-12-02 16:44:25.650335263 +0000 UTC m=+96.177224012"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.712101 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" podStartSLOduration=73.712065497 podStartE2EDuration="1m13.712065497s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.643328692 +0000 UTC m=+96.170217441" watchObservedRunningTime="2025-12-02 16:44:25.712065497 +0000 UTC m=+96.238954246"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.725823 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2" podStartSLOduration=73.725794262 podStartE2EDuration="1m13.725794262s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.678846221 +0000 UTC m=+96.205734980" watchObservedRunningTime="2025-12-02 16:44:25.725794262 +0000 UTC m=+96.252683011"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.729449 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gjmw6" podStartSLOduration=73.729429881 podStartE2EDuration="1m13.729429881s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.725482383 +0000 UTC m=+96.252371162" watchObservedRunningTime="2025-12-02 16:44:25.729429881 +0000 UTC m=+96.256318630"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.744615 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-utilities\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.744830 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqd6r\" (UniqueName: \"kubernetes.io/projected/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-kube-api-access-hqd6r\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.744971 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-catalog-content\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.745104 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.745569 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.245552051 +0000 UTC m=+96.772440800 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.746325 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-utilities\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.747029 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-catalog-content\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.791152 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqd6r\" (UniqueName: \"kubernetes.io/projected/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-kube-api-access-hqd6r\") pod \"certified-operators-vt692\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.817763 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-gn7ft" podStartSLOduration=73.81774126 podStartE2EDuration="1m13.81774126s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.817433502 +0000 UTC m=+96.344322261" watchObservedRunningTime="2025-12-02 16:44:25.81774126 +0000 UTC m=+96.344630009"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.818624 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9vtgt" podStartSLOduration=73.818613974 podStartE2EDuration="1m13.818613974s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.767002746 +0000 UTC m=+96.293891505" watchObservedRunningTime="2025-12-02 16:44:25.818613974 +0000 UTC m=+96.345502743"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.849067 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.849237 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.349216699 +0000 UTC m=+96.876105448 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.849587 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.849895 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.349887907 +0000 UTC m=+96.876776656 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.862970 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-lstcr" podStartSLOduration=73.862940773 podStartE2EDuration="1m13.862940773s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:25.858829841 +0000 UTC m=+96.385718590" watchObservedRunningTime="2025-12-02 16:44:25.862940773 +0000 UTC m=+96.389829522"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.868067 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jl6rp"]
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.869390 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.870606 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jl6rp"]
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.906844 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.950853 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.951076 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5c76\" (UniqueName: \"kubernetes.io/projected/6b230695-281f-405b-98c1-eb2e1470889e-kube-api-access-q5c76\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.951131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-utilities\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:25 crc kubenswrapper[4747]: I1202 16:44:25.951183 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-catalog-content\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:25 crc kubenswrapper[4747]: E1202 16:44:25.951319 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.451299383 +0000 UTC m=+96.978188132 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.041043 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vsbt4"]
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.046492 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.054320 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.054878 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.054892 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vsbt4"]
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.054942 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5c76\" (UniqueName: \"kubernetes.io/projected/6b230695-281f-405b-98c1-eb2e1470889e-kube-api-access-q5c76\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.055125 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-utilities\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.055245 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-catalog-content\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.055494 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.555480615 +0000 UTC m=+97.082369364 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.055868 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-catalog-content\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.055983 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-utilities\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.100777 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5c76\" (UniqueName: \"kubernetes.io/projected/6b230695-281f-405b-98c1-eb2e1470889e-kube-api-access-q5c76\") pod \"certified-operators-jl6rp\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.156478 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.156753 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-utilities\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.156854 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-catalog-content\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.156892 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qt7j\" (UniqueName: \"kubernetes.io/projected/362f8e5d-8e91-435a-af79-5d318c2288e0-kube-api-access-7qt7j\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.157038 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.657023066 +0000 UTC m=+97.183911815 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.245536 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rxrth"]
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.247067 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.258396 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-utilities\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.258725 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.258752 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-catalog-content\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.258790 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qt7j\" (UniqueName: \"kubernetes.io/projected/362f8e5d-8e91-435a-af79-5d318c2288e0-kube-api-access-7qt7j\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.259899 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-utilities\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.260249 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.760234672 +0000 UTC m=+97.287123421 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.260674 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-catalog-content\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.271680 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxrth"]
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.302995 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qt7j\" (UniqueName: \"kubernetes.io/projected/362f8e5d-8e91-435a-af79-5d318c2288e0-kube-api-access-7qt7j\") pod \"community-operators-vsbt4\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") " pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.321741 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jl6rp"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.361364 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.361669 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-catalog-content\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.361747 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rvbw\" (UniqueName: \"kubernetes.io/projected/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-kube-api-access-9rvbw\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.361808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-utilities\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.361978 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.861953947 +0000 UTC m=+97.388842706 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.382227 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.404745 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vt692"]
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.450385 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 16:44:26 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld
Dec 02 16:44:26 crc kubenswrapper[4747]: [+]process-running ok
Dec 02 16:44:26 crc kubenswrapper[4747]: healthz check failed
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.450444 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.462868 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-catalog-content\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.462958 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.463003 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rvbw\" (UniqueName: \"kubernetes.io/projected/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-kube-api-access-9rvbw\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.463067 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-utilities\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.463635 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-utilities\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.463945 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-catalog-content\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.464109 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:26.964095224 +0000 UTC m=+97.490983973 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.493597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rvbw\" (UniqueName: \"kubernetes.io/projected/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-kube-api-access-9rvbw\") pod \"community-operators-rxrth\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.498746 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" event={"ID":"2079a061-35c0-40b3-8591-53decd25d0bf","Type":"ContainerStarted","Data":"67ff7caf196edd8bf6d4ea68a7bea16cc6ea8961e5bf805525cda83944a84ac8"}
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.498795 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" event={"ID":"2079a061-35c0-40b3-8591-53decd25d0bf","Type":"ContainerStarted","Data":"2a3fa4c43237fa7c1988e403253ba46b4deab2ca8304131ed72da4d0f96f257c"}
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.502565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerStarted","Data":"cb99dd5de6c2b502452a8e777f045c09d705abc1522ecde28ec463e769a8d5e4"}
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.525964 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-pktz2"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.566356 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.567877 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.067861124 +0000 UTC m=+97.594749873 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.569006 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" podStartSLOduration=75.568984185 podStartE2EDuration="1m15.568984185s" podCreationTimestamp="2025-12-02 16:43:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:26.533868047 +0000 UTC m=+97.060756796" watchObservedRunningTime="2025-12-02 16:44:26.568984185 +0000 UTC m=+97.095872934"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.630392 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.669108 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.669532 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.169518448 +0000 UTC m=+97.696407197 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.771121 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.771706 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.271683615 +0000 UTC m=+97.798572364 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.872373 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vsbt4"]
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.874125 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.874550 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.374531211 +0000 UTC m=+97.901419960 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.972245 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jl6rp"]
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.975958 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:26 crc kubenswrapper[4747]: E1202 16:44:26.976424 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.47640088 +0000 UTC m=+98.003289639 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:26 crc kubenswrapper[4747]: I1202 16:44:26.983185 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rxrth"]
Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.078112 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.078458 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.578446165 +0000 UTC m=+98.105334914 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.179298 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.179433 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.679408629 +0000 UTC m=+98.206297378 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.179802 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.180257 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.680244372 +0000 UTC m=+98.207133121 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: W1202 16:44:27.216067 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b230695_281f_405b_98c1_eb2e1470889e.slice/crio-3cf48737995dc3169e7886dae4a5e1b49c5966864caf02a60106d781f760193e WatchSource:0}: Error finding container 3cf48737995dc3169e7886dae4a5e1b49c5966864caf02a60106d781f760193e: Status 404 returned error can't find the container with id 3cf48737995dc3169e7886dae4a5e1b49c5966864caf02a60106d781f760193e Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.281105 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.281471 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.781448753 +0000 UTC m=+98.308337502 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.281587 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.282072 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.7820625 +0000 UTC m=+98.308951249 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.382760 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.383622 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.88360586 +0000 UTC m=+98.410494609 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.451385 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:27 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:27 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:27 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.451844 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.485502 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.485832 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:27.985818149 +0000 UTC m=+98.512706898 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.512225 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxrth" event={"ID":"066ad5e3-8cd4-4a73-895d-76671a0d6aa9","Type":"ContainerStarted","Data":"3a5e6fa8b64c7f2a5e1877792dd94f0c62fb0b6c962af503649a7a9e44d45dae"} Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.513950 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vsbt4" event={"ID":"362f8e5d-8e91-435a-af79-5d318c2288e0","Type":"ContainerStarted","Data":"988f1825e1d2327195a48011c0ff846724987f20c8d8479bf06d2f80ccfffa93"} Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.517643 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" event={"ID":"48b44473-6607-4bce-8136-3d355e5ff018","Type":"ContainerStarted","Data":"d6b7431902fc20f66cb40f193d73114334f1b16dc024956b6ee7c918a0113efc"} Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.521649 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerStarted","Data":"ff48e4d332b88d5cd2e25172f2e3ab18fb858d634d0df71318db690cf87881ed"} Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.524773 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl6rp" event={"ID":"6b230695-281f-405b-98c1-eb2e1470889e","Type":"ContainerStarted","Data":"3cf48737995dc3169e7886dae4a5e1b49c5966864caf02a60106d781f760193e"} Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.536329 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fkbtn" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.586131 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.586333 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.08630011 +0000 UTC m=+98.613188859 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.586409 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.586830 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.086822454 +0000 UTC m=+98.613711193 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.631726 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mvzvc"] Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.637344 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.645879 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.660850 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvzvc"] Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.688138 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.688323 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-utilities\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.688351 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-catalog-content\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.688407 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8svh\" (UniqueName: \"kubernetes.io/projected/2fad8dbb-9212-436a-bad9-7439f27afec6-kube-api-access-x8svh\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.688586 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.1885687 +0000 UTC m=+98.715457449 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.742007 4747 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.790206 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8svh\" (UniqueName: \"kubernetes.io/projected/2fad8dbb-9212-436a-bad9-7439f27afec6-kube-api-access-x8svh\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.790346 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.790388 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-utilities\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.790417 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-catalog-content\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.790743 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.290723427 +0000 UTC m=+98.817612176 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.790975 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-catalog-content\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.791153 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-utilities\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.815285 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8svh\" (UniqueName: \"kubernetes.io/projected/2fad8dbb-9212-436a-bad9-7439f27afec6-kube-api-access-x8svh\") pod \"redhat-marketplace-mvzvc\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") " pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.889293 4747 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-02T16:44:27.742050409Z","Handler":null,"Name":""} Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.891980 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.892255 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.392208966 +0000 UTC m=+98.919097715 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.892586 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.893033 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.393012238 +0000 UTC m=+98.919901147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.994334 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.994574 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.494537668 +0000 UTC m=+99.021426427 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.994856 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.995435 4747 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 02 16:44:27 crc kubenswrapper[4747]: I1202 16:44:27.995500 4747 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 02 16:44:27 crc kubenswrapper[4747]: E1202 16:44:27.995446 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 16:44:28.495429502 +0000 UTC m=+99.022318251 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-9jst4" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.039490 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvzvc" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.042803 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-27x57"] Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.044296 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.095712 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.096456 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-catalog-content\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.096516 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-utilities\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.096550 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnj5h\" (UniqueName: \"kubernetes.io/projected/36ead3e1-831c-424c-8f1e-4b4213621c9a-kube-api-access-gnj5h\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.132593 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-27x57"] Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.166837 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.198100 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-catalog-content\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.198187 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.198300 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-utilities\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.198370 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnj5h\" (UniqueName: \"kubernetes.io/projected/36ead3e1-831c-424c-8f1e-4b4213621c9a-kube-api-access-gnj5h\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.199444 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-catalog-content\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.199755 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-utilities\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.212129 4747 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.212194 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.273765 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnj5h\" (UniqueName: \"kubernetes.io/projected/36ead3e1-831c-424c-8f1e-4b4213621c9a-kube-api-access-gnj5h\") pod \"redhat-marketplace-27x57\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") " pod="openshift-marketplace/redhat-marketplace-27x57"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.278819 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-9jst4\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.307771 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.360584 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27x57"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.397517 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvzvc"]
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.406907 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.408033 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.425133 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr"
Dec 02 16:44:28 crc kubenswrapper[4747]: W1202 16:44:28.433318 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fad8dbb_9212_436a_bad9_7439f27afec6.slice/crio-449caf2c9f710470b3f0ea3d14dbbc644f1323919255d893b274e169b9f336dd WatchSource:0}: Error finding container 449caf2c9f710470b3f0ea3d14dbbc644f1323919255d893b274e169b9f336dd: Status 404 returned error can't find the container with id 449caf2c9f710470b3f0ea3d14dbbc644f1323919255d893b274e169b9f336dd
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.447845 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-qmfcd"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.455359 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 16:44:28 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld
Dec 02 16:44:28 crc kubenswrapper[4747]: [+]process-running ok
Dec 02 16:44:28 crc kubenswrapper[4747]: healthz check failed
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.455941 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.557196 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" event={"ID":"48b44473-6607-4bce-8136-3d355e5ff018","Type":"ContainerStarted","Data":"701a553bafac8158210c9767e2f1c73d20da9940b86e376e0b97942b4baa4439"}
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.564859 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerID="ff48e4d332b88d5cd2e25172f2e3ab18fb858d634d0df71318db690cf87881ed" exitCode=0
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.565274 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerDied","Data":"ff48e4d332b88d5cd2e25172f2e3ab18fb858d634d0df71318db690cf87881ed"}
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.567025 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.588646 4747 generic.go:334] "Generic (PLEG): container finished" podID="6b230695-281f-405b-98c1-eb2e1470889e" containerID="9be29bcaf21b8841aef7d58968b0b6bbc6c2687a51e0c8933b96234af3c7807b" exitCode=0
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.588786 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl6rp" event={"ID":"6b230695-281f-405b-98c1-eb2e1470889e","Type":"ContainerDied","Data":"9be29bcaf21b8841aef7d58968b0b6bbc6c2687a51e0c8933b96234af3c7807b"}
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.597203 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvzvc" event={"ID":"2fad8dbb-9212-436a-bad9-7439f27afec6","Type":"ContainerStarted","Data":"449caf2c9f710470b3f0ea3d14dbbc644f1323919255d893b274e169b9f336dd"}
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.614907 4747 generic.go:334] "Generic (PLEG): container finished" podID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerID="8c5fd170f1381392c3f753f7cf02c3689320dc3755530a301a07b9bdde320573" exitCode=0
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.615044 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxrth" event={"ID":"066ad5e3-8cd4-4a73-895d-76671a0d6aa9","Type":"ContainerDied","Data":"8c5fd170f1381392c3f753f7cf02c3689320dc3755530a301a07b9bdde320573"}
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.627102 4747 generic.go:334] "Generic (PLEG): container finished" podID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerID="df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894" exitCode=0
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.628593 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vsbt4" event={"ID":"362f8e5d-8e91-435a-af79-5d318c2288e0","Type":"ContainerDied","Data":"df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894"}
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.658203 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4mzwr"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.691214 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9jst4"]
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.744134 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.745165 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.748737 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.763273 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.763492 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.810688 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e01ffb82-dadb-465d-971b-e53d11c2d62e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.810783 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e01ffb82-dadb-465d-971b-e53d11c2d62e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.831100 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-27x57"]
Dec 02 16:44:28 crc kubenswrapper[4747]: W1202 16:44:28.849331 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36ead3e1_831c_424c_8f1e_4b4213621c9a.slice/crio-3506ac0ddd3fdab431dc69166d5d848a96db34e552e69727450c6c2010d8c98b WatchSource:0}: Error finding container 3506ac0ddd3fdab431dc69166d5d848a96db34e552e69727450c6c2010d8c98b: Status 404 returned error can't find the container with id 3506ac0ddd3fdab431dc69166d5d848a96db34e552e69727450c6c2010d8c98b
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.887589 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dhq2f"]
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.889212 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.898114 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.903754 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dhq2f"]
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.911964 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e01ffb82-dadb-465d-971b-e53d11c2d62e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.912033 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-catalog-content\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.912074 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-utilities\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.912097 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb9hs\" (UniqueName: \"kubernetes.io/projected/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-kube-api-access-rb9hs\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.912137 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e01ffb82-dadb-465d-971b-e53d11c2d62e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.912291 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e01ffb82-dadb-465d-971b-e53d11c2d62e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:28 crc kubenswrapper[4747]: I1202 16:44:28.970154 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e01ffb82-dadb-465d-971b-e53d11c2d62e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.016898 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-catalog-content\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.017008 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-utilities\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.017050 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb9hs\" (UniqueName: \"kubernetes.io/projected/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-kube-api-access-rb9hs\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.018039 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-catalog-content\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.018284 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-utilities\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.018860 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-f27zn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body=
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.018967 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-f27zn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body=
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.018953 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-f27zn" podUID="581ef685-55a6-489c-ab7d-ac0db896aa30" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.019021 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-f27zn" podUID="581ef685-55a6-489c-ab7d-ac0db896aa30" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.22:8080/\": dial tcp 10.217.0.22:8080: connect: connection refused"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.046738 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb9hs\" (UniqueName: \"kubernetes.io/projected/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-kube-api-access-rb9hs\") pod \"redhat-operators-dhq2f\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.132105 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.173238 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.221989 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-8w6hw"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.223128 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-8w6hw"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.225401 4747 patch_prober.go:28] interesting pod/console-f9d7485db-8w6hw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.225451 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8w6hw" podUID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.248042 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.417407 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.417463 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.459111 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 02 16:44:29 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]process-running ok
Dec 02 16:44:29 crc kubenswrapper[4747]: healthz check failed
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.459651 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.460316 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d7l68"]
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.460356 4747 patch_prober.go:28] interesting pod/apiserver-76f77b778f-dnt2d container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]log ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]etcd ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/max-in-flight-filter ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Dec 02 16:44:29 crc kubenswrapper[4747]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/project.openshift.io-projectcache ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-startinformers ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/openshift.io-restmapperupdater ok
Dec 02 16:44:29 crc kubenswrapper[4747]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 02 16:44:29 crc kubenswrapper[4747]: livez check failed
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.460478 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" podUID="2079a061-35c0-40b3-8591-53decd25d0bf" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.474753 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d7l68"]
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.475153 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.539858 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftqx7\" (UniqueName: \"kubernetes.io/projected/788ee83c-7c8c-4e57-a31e-0f658765b846-kube-api-access-ftqx7\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.540000 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-utilities\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.540221 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-catalog-content\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.642301 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftqx7\" (UniqueName: \"kubernetes.io/projected/788ee83c-7c8c-4e57-a31e-0f658765b846-kube-api-access-ftqx7\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.642389 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-utilities\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.642477 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-catalog-content\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.643085 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-catalog-content\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.643341 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-utilities\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.644106 4747 generic.go:334] "Generic (PLEG): container finished" podID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerID="4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d" exitCode=0
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.644178 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvzvc" event={"ID":"2fad8dbb-9212-436a-bad9-7439f27afec6","Type":"ContainerDied","Data":"4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d"}
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.651500 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.653049 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27x57" event={"ID":"36ead3e1-831c-424c-8f1e-4b4213621c9a","Type":"ContainerStarted","Data":"3506ac0ddd3fdab431dc69166d5d848a96db34e552e69727450c6c2010d8c98b"}
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.656157 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" event={"ID":"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3","Type":"ContainerStarted","Data":"47029f4074c32a9a4ea38e0b5552badfe0c91d33d098d142a4cc0de53fae332c"}
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.656229 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" event={"ID":"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3","Type":"ContainerStarted","Data":"092de9915beaa93e02f207a5b5a60955446858e0da498f0ebf659b798134ecb2"}
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.661993 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" event={"ID":"48b44473-6607-4bce-8136-3d355e5ff018","Type":"ContainerStarted","Data":"95716a4348c04e8e5c03b14ca38b6eed5a188f38131fdebac0e35a774872c6bd"}
Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.679339 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-ftqx7\" (UniqueName: \"kubernetes.io/projected/788ee83c-7c8c-4e57-a31e-0f658765b846-kube-api-access-ftqx7\") pod \"redhat-operators-d7l68\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " pod="openshift-marketplace/redhat-operators-d7l68" Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.707512 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-b79s7" podStartSLOduration=13.707442339 podStartE2EDuration="13.707442339s" podCreationTimestamp="2025-12-02 16:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:29.702980527 +0000 UTC m=+100.229869276" watchObservedRunningTime="2025-12-02 16:44:29.707442339 +0000 UTC m=+100.234331088" Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.816179 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d7l68" Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.816876 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 02 16:44:29 crc kubenswrapper[4747]: I1202 16:44:29.872788 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dhq2f"] Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.221617 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d7l68"] Dec 02 16:44:30 crc kubenswrapper[4747]: W1202 16:44:30.257603 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod788ee83c_7c8c_4e57_a31e_0f658765b846.slice/crio-fa340af51b029a392735fa527154f9b56ef003c75e212923043f31cc5fe2f8af WatchSource:0}: Error finding container fa340af51b029a392735fa527154f9b56ef003c75e212923043f31cc5fe2f8af: Status 404 returned error can't find the container with id fa340af51b029a392735fa527154f9b56ef003c75e212923043f31cc5fe2f8af Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.273804 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.274613 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.278544 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.278832 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.282905 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.373832 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5015bdd-fa36-4103-8336-ec9188ed23ef-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.374408 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5015bdd-fa36-4103-8336-ec9188ed23ef-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.452362 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:30 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:30 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:30 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.452445 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.475489 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5015bdd-fa36-4103-8336-ec9188ed23ef-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.475608 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.475665 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5015bdd-fa36-4103-8336-ec9188ed23ef-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.475778 4747 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5015bdd-fa36-4103-8336-ec9188ed23ef-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.484895 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fe274425-e804-4934-aa14-81ef24981fe9-metrics-certs\") pod \"network-metrics-daemon-8brc6\" (UID: \"fe274425-e804-4934-aa14-81ef24981fe9\") " pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.498439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5015bdd-fa36-4103-8336-ec9188ed23ef-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.603023 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.665808 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-8brc6" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.683325 4747 generic.go:334] "Generic (PLEG): container finished" podID="2530933f-b67d-4f7e-93c2-3edf83af285b" containerID="ba438233e8df9edb1202f44f7b3e1ec5dfa47560bef619deb81b17a4767a4968" exitCode=0 Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.683402 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" event={"ID":"2530933f-b67d-4f7e-93c2-3edf83af285b","Type":"ContainerDied","Data":"ba438233e8df9edb1202f44f7b3e1ec5dfa47560bef619deb81b17a4767a4968"} Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.687638 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d7l68" event={"ID":"788ee83c-7c8c-4e57-a31e-0f658765b846","Type":"ContainerStarted","Data":"fa340af51b029a392735fa527154f9b56ef003c75e212923043f31cc5fe2f8af"} Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.692615 4747 generic.go:334] "Generic (PLEG): container finished" podID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerID="a2c4ac71dc25c16e8d5b255f73349c26c241ea3459093e5c9a4bd1d99b760765" exitCode=0 Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.692679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27x57" event={"ID":"36ead3e1-831c-424c-8f1e-4b4213621c9a","Type":"ContainerDied","Data":"a2c4ac71dc25c16e8d5b255f73349c26c241ea3459093e5c9a4bd1d99b760765"} Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.705157 4747 generic.go:334] "Generic (PLEG): container finished" podID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerID="67cce7ffe76d2c557eef7161c48623ad61a57275334c9657d245107aaf035cf8" exitCode=0 Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.705224 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhq2f" event={"ID":"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553","Type":"ContainerDied","Data":"67cce7ffe76d2c557eef7161c48623ad61a57275334c9657d245107aaf035cf8"} Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.705255 4747 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhq2f" event={"ID":"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553","Type":"ContainerStarted","Data":"8723fa04857b53daaf8b7a280ead579c0435ead1e9d6fa496baf91ce8b0ad9f0"} Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.713099 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e01ffb82-dadb-465d-971b-e53d11c2d62e","Type":"ContainerStarted","Data":"6a54282bc1bea5664d954d4448419ec10fe8046624266aaf0901bb1a0c070675"} Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.713143 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e01ffb82-dadb-465d-971b-e53d11c2d62e","Type":"ContainerStarted","Data":"7cf855fc1d6c504aab9af27dde95855db329fd8bc1d7419b1ce98aafb88ee75a"} Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.713163 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.742611 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.742592161 podStartE2EDuration="2.742592161s" podCreationTimestamp="2025-12-02 16:44:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:30.736956117 +0000 UTC m=+101.263844866" watchObservedRunningTime="2025-12-02 16:44:30.742592161 +0000 UTC m=+101.269480910" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.771285 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" podStartSLOduration=78.771258043 podStartE2EDuration="1m18.771258043s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:30.768497567 +0000 UTC m=+101.295386316" watchObservedRunningTime="2025-12-02 16:44:30.771258043 +0000 UTC m=+101.298146802" Dec 02 16:44:30 crc kubenswrapper[4747]: I1202 16:44:30.864673 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.017415 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-8brc6"] Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.034686 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:44:31 crc kubenswrapper[4747]: W1202 16:44:31.058182 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe274425_e804_4934_aa14_81ef24981fe9.slice/crio-ca1e80576825aa72a65515d8e5500ca57fa8c6df99864795972a81ab20507275 WatchSource:0}: Error finding container ca1e80576825aa72a65515d8e5500ca57fa8c6df99864795972a81ab20507275: Status 404 returned error can't find the container with id ca1e80576825aa72a65515d8e5500ca57fa8c6df99864795972a81ab20507275 Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.452588 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure 
output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:31 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:31 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:31 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.453190 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.750888 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5015bdd-fa36-4103-8336-ec9188ed23ef","Type":"ContainerStarted","Data":"ab72c26dacb21b48852c268457384fffb677d45d45960792e60c26af136c5d0d"} Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.750973 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5015bdd-fa36-4103-8336-ec9188ed23ef","Type":"ContainerStarted","Data":"4d9dfcb8d0fd098cb7baec67fceb6865480c4cc70d088bddadc3c4e1025de0c3"} Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.753616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8brc6" event={"ID":"fe274425-e804-4934-aa14-81ef24981fe9","Type":"ContainerStarted","Data":"9590d29f16c550bc33e1ead64f195cc7ed0622d52e6b3e1e124aed97aefef2b9"} Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.753690 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8brc6" event={"ID":"fe274425-e804-4934-aa14-81ef24981fe9","Type":"ContainerStarted","Data":"ca1e80576825aa72a65515d8e5500ca57fa8c6df99864795972a81ab20507275"} Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.769999 4747 generic.go:334] "Generic (PLEG): container finished" podID="e01ffb82-dadb-465d-971b-e53d11c2d62e" containerID="6a54282bc1bea5664d954d4448419ec10fe8046624266aaf0901bb1a0c070675" exitCode=0 Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.772401 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=1.772379526 podStartE2EDuration="1.772379526s" podCreationTimestamp="2025-12-02 16:44:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:31.771672516 +0000 UTC m=+102.298561265" watchObservedRunningTime="2025-12-02 16:44:31.772379526 +0000 UTC m=+102.299268275" Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.800723 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e01ffb82-dadb-465d-971b-e53d11c2d62e","Type":"ContainerDied","Data":"6a54282bc1bea5664d954d4448419ec10fe8046624266aaf0901bb1a0c070675"} Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.803231 4747 generic.go:334] "Generic (PLEG): container finished" podID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerID="b057fb7deaf7c7ecac1dc423aabc999ac8f8eed4bffcf9c6650ad84fd6a088a8" exitCode=0 Dec 02 16:44:31 crc kubenswrapper[4747]: I1202 16:44:31.804809 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d7l68" 
event={"ID":"788ee83c-7c8c-4e57-a31e-0f658765b846","Type":"ContainerDied","Data":"b057fb7deaf7c7ecac1dc423aabc999ac8f8eed4bffcf9c6650ad84fd6a088a8"} Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.142584 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.219668 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2530933f-b67d-4f7e-93c2-3edf83af285b-config-volume\") pod \"2530933f-b67d-4f7e-93c2-3edf83af285b\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.219790 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxqgc\" (UniqueName: \"kubernetes.io/projected/2530933f-b67d-4f7e-93c2-3edf83af285b-kube-api-access-nxqgc\") pod \"2530933f-b67d-4f7e-93c2-3edf83af285b\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.219858 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2530933f-b67d-4f7e-93c2-3edf83af285b-secret-volume\") pod \"2530933f-b67d-4f7e-93c2-3edf83af285b\" (UID: \"2530933f-b67d-4f7e-93c2-3edf83af285b\") " Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.221929 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2530933f-b67d-4f7e-93c2-3edf83af285b-config-volume" (OuterVolumeSpecName: "config-volume") pod "2530933f-b67d-4f7e-93c2-3edf83af285b" (UID: "2530933f-b67d-4f7e-93c2-3edf83af285b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.241595 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2530933f-b67d-4f7e-93c2-3edf83af285b-kube-api-access-nxqgc" (OuterVolumeSpecName: "kube-api-access-nxqgc") pod "2530933f-b67d-4f7e-93c2-3edf83af285b" (UID: "2530933f-b67d-4f7e-93c2-3edf83af285b"). InnerVolumeSpecName "kube-api-access-nxqgc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.258169 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2530933f-b67d-4f7e-93c2-3edf83af285b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2530933f-b67d-4f7e-93c2-3edf83af285b" (UID: "2530933f-b67d-4f7e-93c2-3edf83af285b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.321164 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxqgc\" (UniqueName: \"kubernetes.io/projected/2530933f-b67d-4f7e-93c2-3edf83af285b-kube-api-access-nxqgc\") on node \"crc\" DevicePath \"\"" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.321210 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2530933f-b67d-4f7e-93c2-3edf83af285b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.321220 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2530933f-b67d-4f7e-93c2-3edf83af285b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.450978 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:32 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:32 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:32 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.451067 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.816794 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" event={"ID":"2530933f-b67d-4f7e-93c2-3edf83af285b","Type":"ContainerDied","Data":"6b3cf2c24302386fdd5ab194c606c8ba262d9031897b544c2e928f94df07959d"} Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.816853 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b3cf2c24302386fdd5ab194c606c8ba262d9031897b544c2e928f94df07959d" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.816973 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7" Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.847613 4747 generic.go:334] "Generic (PLEG): container finished" podID="a5015bdd-fa36-4103-8336-ec9188ed23ef" containerID="ab72c26dacb21b48852c268457384fffb677d45d45960792e60c26af136c5d0d" exitCode=0 Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.847731 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5015bdd-fa36-4103-8336-ec9188ed23ef","Type":"ContainerDied","Data":"ab72c26dacb21b48852c268457384fffb677d45d45960792e60c26af136c5d0d"} Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.861482 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-8brc6" event={"ID":"fe274425-e804-4934-aa14-81ef24981fe9","Type":"ContainerStarted","Data":"63f20dfe3c8d03bd815fb8e1e22825703e340dade9e8bb6b99ac98b2975d3627"} Dec 02 16:44:32 crc kubenswrapper[4747]: I1202 16:44:32.894047 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-8brc6" podStartSLOduration=80.894019177 podStartE2EDuration="1m20.894019177s" podCreationTimestamp="2025-12-02 16:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:44:32.89009737 +0000 UTC m=+103.416986139" watchObservedRunningTime="2025-12-02 16:44:32.894019177 +0000 UTC m=+103.420907926" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.451490 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:33 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:33 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:33 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.451943 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.468682 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.536947 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e01ffb82-dadb-465d-971b-e53d11c2d62e-kubelet-dir\") pod \"e01ffb82-dadb-465d-971b-e53d11c2d62e\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.537081 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e01ffb82-dadb-465d-971b-e53d11c2d62e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e01ffb82-dadb-465d-971b-e53d11c2d62e" (UID: "e01ffb82-dadb-465d-971b-e53d11c2d62e"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.537110 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e01ffb82-dadb-465d-971b-e53d11c2d62e-kube-api-access\") pod \"e01ffb82-dadb-465d-971b-e53d11c2d62e\" (UID: \"e01ffb82-dadb-465d-971b-e53d11c2d62e\") " Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.538894 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e01ffb82-dadb-465d-971b-e53d11c2d62e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.544598 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e01ffb82-dadb-465d-971b-e53d11c2d62e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e01ffb82-dadb-465d-971b-e53d11c2d62e" (UID: "e01ffb82-dadb-465d-971b-e53d11c2d62e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.641148 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e01ffb82-dadb-465d-971b-e53d11c2d62e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.880600 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.881296 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e01ffb82-dadb-465d-971b-e53d11c2d62e","Type":"ContainerDied","Data":"7cf855fc1d6c504aab9af27dde95855db329fd8bc1d7419b1ce98aafb88ee75a"} Dec 02 16:44:33 crc kubenswrapper[4747]: I1202 16:44:33.882531 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cf855fc1d6c504aab9af27dde95855db329fd8bc1d7419b1ce98aafb88ee75a" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.254172 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.350393 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5015bdd-fa36-4103-8336-ec9188ed23ef-kube-api-access\") pod \"a5015bdd-fa36-4103-8336-ec9188ed23ef\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.350535 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5015bdd-fa36-4103-8336-ec9188ed23ef-kubelet-dir\") pod \"a5015bdd-fa36-4103-8336-ec9188ed23ef\" (UID: \"a5015bdd-fa36-4103-8336-ec9188ed23ef\") " Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.350695 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5015bdd-fa36-4103-8336-ec9188ed23ef-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a5015bdd-fa36-4103-8336-ec9188ed23ef" (UID: "a5015bdd-fa36-4103-8336-ec9188ed23ef"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.350929 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5015bdd-fa36-4103-8336-ec9188ed23ef-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.353722 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5015bdd-fa36-4103-8336-ec9188ed23ef-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a5015bdd-fa36-4103-8336-ec9188ed23ef" (UID: "a5015bdd-fa36-4103-8336-ec9188ed23ef"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.423625 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.432393 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-dnt2d" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.458409 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:34 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:34 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:34 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.458484 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.458547 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5015bdd-fa36-4103-8336-ec9188ed23ef-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.560350 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-c4zkq" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.907781 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5015bdd-fa36-4103-8336-ec9188ed23ef","Type":"ContainerDied","Data":"4d9dfcb8d0fd098cb7baec67fceb6865480c4cc70d088bddadc3c4e1025de0c3"} Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.907860 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d9dfcb8d0fd098cb7baec67fceb6865480c4cc70d088bddadc3c4e1025de0c3" Dec 02 16:44:34 crc kubenswrapper[4747]: I1202 16:44:34.907862 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 16:44:35 crc kubenswrapper[4747]: I1202 16:44:35.448932 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:35 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:35 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:35 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:35 crc kubenswrapper[4747]: I1202 16:44:35.449327 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:36 crc kubenswrapper[4747]: I1202 16:44:36.449182 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 16:44:36 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Dec 02 16:44:36 crc kubenswrapper[4747]: [+]process-running ok Dec 02 16:44:36 crc kubenswrapper[4747]: healthz check failed Dec 02 16:44:36 crc kubenswrapper[4747]: I1202 16:44:36.449295 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 16:44:37 crc kubenswrapper[4747]: I1202 16:44:37.451308 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:37 crc kubenswrapper[4747]: I1202 16:44:37.459201 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-qmfcd" Dec 02 16:44:39 crc kubenswrapper[4747]: I1202 16:44:39.053659 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-f27zn" Dec 02 16:44:39 crc kubenswrapper[4747]: I1202 16:44:39.223480 4747 patch_prober.go:28] interesting pod/console-f9d7485db-8w6hw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Dec 02 16:44:39 crc kubenswrapper[4747]: I1202 16:44:39.223550 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-8w6hw" podUID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Dec 02 16:44:48 crc kubenswrapper[4747]: I1202 16:44:48.319470 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:44:49 crc kubenswrapper[4747]: I1202 16:44:49.492593 4747 patch_prober.go:28] interesting pod/router-default-5444994796-qmfcd container/router namespace/openshift-ingress: Readiness probe status=failure output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" 
start-of-body= Dec 02 16:44:49 crc kubenswrapper[4747]: I1202 16:44:49.492717 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-ingress/router-default-5444994796-qmfcd" podUID="04e7d78c-36d3-4997-8ed4-62c494eda7d3" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 16:44:49 crc kubenswrapper[4747]: I1202 16:44:49.496730 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:49 crc kubenswrapper[4747]: I1202 16:44:49.502020 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:44:59 crc kubenswrapper[4747]: I1202 16:44:59.745329 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x5bs9" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.143077 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k"] Dec 02 16:45:00 crc kubenswrapper[4747]: E1202 16:45:00.143626 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e01ffb82-dadb-465d-971b-e53d11c2d62e" containerName="pruner" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.143724 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e01ffb82-dadb-465d-971b-e53d11c2d62e" containerName="pruner" Dec 02 16:45:00 crc kubenswrapper[4747]: E1202 16:45:00.143800 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5015bdd-fa36-4103-8336-ec9188ed23ef" containerName="pruner" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.143883 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5015bdd-fa36-4103-8336-ec9188ed23ef" containerName="pruner" Dec 02 16:45:00 crc kubenswrapper[4747]: E1202 16:45:00.144010 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2530933f-b67d-4f7e-93c2-3edf83af285b" containerName="collect-profiles" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.144079 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2530933f-b67d-4f7e-93c2-3edf83af285b" containerName="collect-profiles" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.144275 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5015bdd-fa36-4103-8336-ec9188ed23ef" containerName="pruner" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.144352 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e01ffb82-dadb-465d-971b-e53d11c2d62e" containerName="pruner" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.144416 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2530933f-b67d-4f7e-93c2-3edf83af285b" containerName="collect-profiles" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.144844 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.147354 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.147409 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.155967 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k"] Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.245122 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95gfp\" (UniqueName: \"kubernetes.io/projected/2300c9ec-034d-4103-8a50-a9c5c507f1ad-kube-api-access-95gfp\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.245251 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2300c9ec-034d-4103-8a50-a9c5c507f1ad-secret-volume\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.245377 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2300c9ec-034d-4103-8a50-a9c5c507f1ad-config-volume\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.346409 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95gfp\" (UniqueName: \"kubernetes.io/projected/2300c9ec-034d-4103-8a50-a9c5c507f1ad-kube-api-access-95gfp\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.346554 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2300c9ec-034d-4103-8a50-a9c5c507f1ad-secret-volume\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.346831 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2300c9ec-034d-4103-8a50-a9c5c507f1ad-config-volume\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.348689 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2300c9ec-034d-4103-8a50-a9c5c507f1ad-config-volume\") pod 
\"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.359638 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2300c9ec-034d-4103-8a50-a9c5c507f1ad-secret-volume\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.373263 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95gfp\" (UniqueName: \"kubernetes.io/projected/2300c9ec-034d-4103-8a50-a9c5c507f1ad-kube-api-access-95gfp\") pod \"collect-profiles-29411565-x599k\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:00 crc kubenswrapper[4747]: I1202 16:45:00.513984 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.470621 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.472114 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.475401 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.476128 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.482428 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.628857 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/88a302d9-a421-4b30-867a-60b3258553d5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.628986 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/88a302d9-a421-4b30-867a-60b3258553d5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.730113 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/88a302d9-a421-4b30-867a-60b3258553d5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.730229 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/88a302d9-a421-4b30-867a-60b3258553d5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.730282 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/88a302d9-a421-4b30-867a-60b3258553d5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.765882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/88a302d9-a421-4b30-867a-60b3258553d5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:05 crc kubenswrapper[4747]: I1202 16:45:05.806587 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.663021 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.665199 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.675089 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.846422 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.846469 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-var-lock\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.846517 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kube-api-access\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.947688 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.947794 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-var-lock\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " 
pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.947833 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.947881 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kube-api-access\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.947934 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-var-lock\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:10 crc kubenswrapper[4747]: I1202 16:45:10.969793 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kube-api-access\") pod \"installer-9-crc\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:11 crc kubenswrapper[4747]: I1202 16:45:11.000615 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:45:15 crc kubenswrapper[4747]: E1202 16:45:15.232832 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 16:45:15 crc kubenswrapper[4747]: E1202 16:45:15.233311 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hqd6r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vt692_openshift-marketplace(b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:15 crc kubenswrapper[4747]: E1202 16:45:15.234496 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vt692" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.823714 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.823801 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.823832 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.823990 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.827239 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.828382 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.829185 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.835665 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.842530 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.848782 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.848878 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:45:15 crc kubenswrapper[4747]: I1202 16:45:15.981777 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:45:16 crc kubenswrapper[4747]: I1202 16:45:16.009143 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 16:45:16 crc kubenswrapper[4747]: I1202 16:45:16.661711 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:45:16 crc kubenswrapper[4747]: I1202 16:45:16.898180 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 16:45:31 crc kubenswrapper[4747]: I1202 16:45:31.795540 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:45:31 crc kubenswrapper[4747]: I1202 16:45:31.796312 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:45:33 crc kubenswrapper[4747]: E1202 16:45:33.437695 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 16:45:33 crc kubenswrapper[4747]: E1202 16:45:33.438179 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rb9hs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-dhq2f_openshift-marketplace(37641b5c-f1c3-490e-8b8c-8d7ee2ea4553): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:33 crc kubenswrapper[4747]: E1202 16:45:33.439939 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-dhq2f" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" Dec 02 16:45:41 crc kubenswrapper[4747]: E1202 16:45:41.612383 4747 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-dhq2f" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" Dec 02 16:45:43 crc kubenswrapper[4747]: E1202 16:45:43.078401 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 16:45:43 crc kubenswrapper[4747]: E1202 16:45:43.078604 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7qt7j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-vsbt4_openshift-marketplace(362f8e5d-8e91-435a-af79-5d318c2288e0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:43 crc kubenswrapper[4747]: E1202 16:45:43.079823 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-vsbt4" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" Dec 02 16:45:43 crc kubenswrapper[4747]: E1202 16:45:43.459962 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 16:45:43 crc kubenswrapper[4747]: E1202 16:45:43.460154 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog 
--cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ftqx7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-d7l68_openshift-marketplace(788ee83c-7c8c-4e57-a31e-0f658765b846): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:43 crc kubenswrapper[4747]: E1202 16:45:43.461825 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-d7l68" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" Dec 02 16:45:44 crc kubenswrapper[4747]: E1202 16:45:44.273192 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-d7l68" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" Dec 02 16:45:44 crc kubenswrapper[4747]: E1202 16:45:44.273202 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-vsbt4" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" Dec 02 16:45:45 crc kubenswrapper[4747]: E1202 16:45:45.052832 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 16:45:45 crc kubenswrapper[4747]: E1202 16:45:45.053713 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x8svh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-mvzvc_openshift-marketplace(2fad8dbb-9212-436a-bad9-7439f27afec6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:45 crc kubenswrapper[4747]: E1202 16:45:45.055404 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-mvzvc" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.085308 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 16:45:45 crc kubenswrapper[4747]: W1202 16:45:45.102155 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-e6100c3af89ebdeabf8b749d1a7233480f6fc7b4f4037af6abbfac112155d653 WatchSource:0}: Error finding container e6100c3af89ebdeabf8b749d1a7233480f6fc7b4f4037af6abbfac112155d653: Status 404 returned error can't find the container with id e6100c3af89ebdeabf8b749d1a7233480f6fc7b4f4037af6abbfac112155d653 Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.160322 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.242938 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k"] Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.491278 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" event={"ID":"2300c9ec-034d-4103-8a50-a9c5c507f1ad","Type":"ContainerStarted","Data":"69e0b610290ebd2b5b9d0cf836c561d4cc3376da5a550b733e8bed31aa687061"} Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.492715 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" 
event={"ID":"f977bba4-b1bf-4915-bb71-71a4dd413d9b","Type":"ContainerStarted","Data":"43d177fc68716b952919c221c1239a9084b6c6ec8e478ce6093d74560ef12faa"} Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.495886 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"88a302d9-a421-4b30-867a-60b3258553d5","Type":"ContainerStarted","Data":"1da15aa08f46498162215ea55816d5f8fa93eee2187bc6c07426e0535291ebc8"} Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.497781 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"de571194f643d778674910b0f8cd246c6b2e7b113b7e6209097f9d22e6986563"} Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.498892 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e6100c3af89ebdeabf8b749d1a7233480f6fc7b4f4037af6abbfac112155d653"} Dec 02 16:45:45 crc kubenswrapper[4747]: I1202 16:45:45.500123 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3b2cd3932921751da74e7cab738fefd5ff96c38a35f93ec287276d764136df18"} Dec 02 16:45:45 crc kubenswrapper[4747]: E1202 16:45:45.502676 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-mvzvc" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" Dec 02 16:45:45 crc kubenswrapper[4747]: E1202 16:45:45.682838 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 16:45:45 crc kubenswrapper[4747]: E1202 16:45:45.683652 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9rvbw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-rxrth_openshift-marketplace(066ad5e3-8cd4-4a73-895d-76671a0d6aa9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:45 crc kubenswrapper[4747]: E1202 16:45:45.685035 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-rxrth" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.508080 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"40c4776fd328e6d88be98f2b4776a29567c812ede062c00d58489fa4d3551e3b"} Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.511826 4747 generic.go:334] "Generic (PLEG): container finished" podID="2300c9ec-034d-4103-8a50-a9c5c507f1ad" containerID="4171828032fd92dcc8dc89ed341f4732b882280991babea504307e3880035c73" exitCode=0 Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.511973 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" event={"ID":"2300c9ec-034d-4103-8a50-a9c5c507f1ad","Type":"ContainerDied","Data":"4171828032fd92dcc8dc89ed341f4732b882280991babea504307e3880035c73"} Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.513779 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f977bba4-b1bf-4915-bb71-71a4dd413d9b","Type":"ContainerStarted","Data":"1ea0b7df0c4b962e494968382928ff445015efdc1989b335af141e77cf05f7c1"} Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.519469 4747 generic.go:334] "Generic (PLEG): container finished" podID="88a302d9-a421-4b30-867a-60b3258553d5" containerID="5c25a39a68764f2269c237598da0d984ad83d687b12f1eb8eee1edd39662ebca" exitCode=0 Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.519578 
4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"88a302d9-a421-4b30-867a-60b3258553d5","Type":"ContainerDied","Data":"5c25a39a68764f2269c237598da0d984ad83d687b12f1eb8eee1edd39662ebca"} Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.522368 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5c2ea27151f58f94f965f8a87a8ce221e4cd52c4e07ce8fe945106e7173a26bb"} Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.522965 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.526209 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"0c3b7be3486c94a1e57117998293cebcbb4714c48ebfa4775b0b5accf2291c6e"} Dec 02 16:45:46 crc kubenswrapper[4747]: I1202 16:45:46.593560 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=36.593531752 podStartE2EDuration="36.593531752s" podCreationTimestamp="2025-12-02 16:45:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:45:46.590180266 +0000 UTC m=+177.117069025" watchObservedRunningTime="2025-12-02 16:45:46.593531752 +0000 UTC m=+177.120420501" Dec 02 16:45:47 crc kubenswrapper[4747]: E1202 16:45:47.421606 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 16:45:47 crc kubenswrapper[4747]: E1202 16:45:47.421889 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gnj5h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-27x57_openshift-marketplace(36ead3e1-831c-424c-8f1e-4b4213621c9a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:47 crc kubenswrapper[4747]: E1202 16:45:47.423128 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-27x57" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" Dec 02 16:45:47 crc kubenswrapper[4747]: E1202 16:45:47.693055 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 16:45:47 crc kubenswrapper[4747]: E1202 16:45:47.693386 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q5c76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-jl6rp_openshift-marketplace(6b230695-281f-405b-98c1-eb2e1470889e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 16:45:47 crc kubenswrapper[4747]: E1202 16:45:47.694587 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-jl6rp" podUID="6b230695-281f-405b-98c1-eb2e1470889e" Dec 02 16:45:48 crc kubenswrapper[4747]: E1202 16:45:48.202163 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-27x57" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" Dec 02 16:45:48 crc kubenswrapper[4747]: E1202 16:45:48.202233 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-rxrth" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.253155 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.258376 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.319851 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/88a302d9-a421-4b30-867a-60b3258553d5-kube-api-access\") pod \"88a302d9-a421-4b30-867a-60b3258553d5\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.319939 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2300c9ec-034d-4103-8a50-a9c5c507f1ad-config-volume\") pod \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.319999 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/88a302d9-a421-4b30-867a-60b3258553d5-kubelet-dir\") pod \"88a302d9-a421-4b30-867a-60b3258553d5\" (UID: \"88a302d9-a421-4b30-867a-60b3258553d5\") " Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.320162 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/88a302d9-a421-4b30-867a-60b3258553d5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "88a302d9-a421-4b30-867a-60b3258553d5" (UID: "88a302d9-a421-4b30-867a-60b3258553d5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.320221 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95gfp\" (UniqueName: \"kubernetes.io/projected/2300c9ec-034d-4103-8a50-a9c5c507f1ad-kube-api-access-95gfp\") pod \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.320281 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2300c9ec-034d-4103-8a50-a9c5c507f1ad-secret-volume\") pod \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\" (UID: \"2300c9ec-034d-4103-8a50-a9c5c507f1ad\") " Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.320576 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/88a302d9-a421-4b30-867a-60b3258553d5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.320613 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2300c9ec-034d-4103-8a50-a9c5c507f1ad-config-volume" (OuterVolumeSpecName: "config-volume") pod "2300c9ec-034d-4103-8a50-a9c5c507f1ad" (UID: "2300c9ec-034d-4103-8a50-a9c5c507f1ad"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.326184 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2300c9ec-034d-4103-8a50-a9c5c507f1ad-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2300c9ec-034d-4103-8a50-a9c5c507f1ad" (UID: "2300c9ec-034d-4103-8a50-a9c5c507f1ad"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.328159 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2300c9ec-034d-4103-8a50-a9c5c507f1ad-kube-api-access-95gfp" (OuterVolumeSpecName: "kube-api-access-95gfp") pod "2300c9ec-034d-4103-8a50-a9c5c507f1ad" (UID: "2300c9ec-034d-4103-8a50-a9c5c507f1ad"). InnerVolumeSpecName "kube-api-access-95gfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.328632 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88a302d9-a421-4b30-867a-60b3258553d5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "88a302d9-a421-4b30-867a-60b3258553d5" (UID: "88a302d9-a421-4b30-867a-60b3258553d5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.424828 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95gfp\" (UniqueName: \"kubernetes.io/projected/2300c9ec-034d-4103-8a50-a9c5c507f1ad-kube-api-access-95gfp\") on node \"crc\" DevicePath \"\"" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.425369 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2300c9ec-034d-4103-8a50-a9c5c507f1ad-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.425383 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/88a302d9-a421-4b30-867a-60b3258553d5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.425394 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2300c9ec-034d-4103-8a50-a9c5c507f1ad-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.539609 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" event={"ID":"2300c9ec-034d-4103-8a50-a9c5c507f1ad","Type":"ContainerDied","Data":"69e0b610290ebd2b5b9d0cf836c561d4cc3376da5a550b733e8bed31aa687061"} Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.539667 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69e0b610290ebd2b5b9d0cf836c561d4cc3376da5a550b733e8bed31aa687061" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.539758 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.543031 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.545118 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"88a302d9-a421-4b30-867a-60b3258553d5","Type":"ContainerDied","Data":"1da15aa08f46498162215ea55816d5f8fa93eee2187bc6c07426e0535291ebc8"} Dec 02 16:45:48 crc kubenswrapper[4747]: I1202 16:45:48.545164 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1da15aa08f46498162215ea55816d5f8fa93eee2187bc6c07426e0535291ebc8" Dec 02 16:45:48 crc kubenswrapper[4747]: E1202 16:45:48.547515 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-jl6rp" podUID="6b230695-281f-405b-98c1-eb2e1470889e" Dec 02 16:45:49 crc kubenswrapper[4747]: I1202 16:45:49.551992 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerStarted","Data":"1b18dddcc9ac068b154d31763d5ebb4d774738a4502a043f90f8f7a788811522"} Dec 02 16:45:51 crc kubenswrapper[4747]: I1202 16:45:51.564094 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerID="1b18dddcc9ac068b154d31763d5ebb4d774738a4502a043f90f8f7a788811522" exitCode=0 Dec 02 16:45:51 crc kubenswrapper[4747]: I1202 16:45:51.564158 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerDied","Data":"1b18dddcc9ac068b154d31763d5ebb4d774738a4502a043f90f8f7a788811522"} Dec 02 16:45:57 crc kubenswrapper[4747]: I1202 16:45:57.606537 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerStarted","Data":"f1a89ae4e659475ac93f445f047280ee354ece84ee4abccadc5e6b5a0ebb5121"} Dec 02 16:45:57 crc kubenswrapper[4747]: I1202 16:45:57.612979 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d7l68" event={"ID":"788ee83c-7c8c-4e57-a31e-0f658765b846","Type":"ContainerStarted","Data":"13a8875dfe38affa183c5179fbb28b885beb50fade5cef7764020e96212eb341"} Dec 02 16:45:57 crc kubenswrapper[4747]: I1202 16:45:57.641121 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vt692" podStartSLOduration=4.432722018 podStartE2EDuration="1m32.641089991s" podCreationTimestamp="2025-12-02 16:44:25 +0000 UTC" firstStartedPulling="2025-12-02 16:44:28.566709308 +0000 UTC m=+99.093598057" lastFinishedPulling="2025-12-02 16:45:56.775077281 +0000 UTC m=+187.301966030" observedRunningTime="2025-12-02 16:45:57.636276174 +0000 UTC m=+188.163164923" watchObservedRunningTime="2025-12-02 16:45:57.641089991 +0000 UTC m=+188.167978740" Dec 02 16:45:58 crc kubenswrapper[4747]: I1202 16:45:58.621268 4747 generic.go:334] "Generic (PLEG): container finished" podID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerID="13a8875dfe38affa183c5179fbb28b885beb50fade5cef7764020e96212eb341" exitCode=0 Dec 02 16:45:58 crc kubenswrapper[4747]: I1202 16:45:58.621494 4747 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/redhat-operators-d7l68" event={"ID":"788ee83c-7c8c-4e57-a31e-0f658765b846","Type":"ContainerDied","Data":"13a8875dfe38affa183c5179fbb28b885beb50fade5cef7764020e96212eb341"} Dec 02 16:45:58 crc kubenswrapper[4747]: I1202 16:45:58.629985 4747 generic.go:334] "Generic (PLEG): container finished" podID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerID="db9ec764f05263f842db59c01805a73d0ab73d6047c8d0a3303d548359af51e0" exitCode=0 Dec 02 16:45:58 crc kubenswrapper[4747]: I1202 16:45:58.630041 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhq2f" event={"ID":"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553","Type":"ContainerDied","Data":"db9ec764f05263f842db59c01805a73d0ab73d6047c8d0a3303d548359af51e0"} Dec 02 16:46:01 crc kubenswrapper[4747]: I1202 16:46:01.794558 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:46:01 crc kubenswrapper[4747]: I1202 16:46:01.794936 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:46:02 crc kubenswrapper[4747]: I1202 16:46:02.662527 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d7l68" event={"ID":"788ee83c-7c8c-4e57-a31e-0f658765b846","Type":"ContainerStarted","Data":"c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761"} Dec 02 16:46:02 crc kubenswrapper[4747]: I1202 16:46:02.664205 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vsbt4" event={"ID":"362f8e5d-8e91-435a-af79-5d318c2288e0","Type":"ContainerStarted","Data":"b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78"} Dec 02 16:46:02 crc kubenswrapper[4747]: I1202 16:46:02.666452 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhq2f" event={"ID":"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553","Type":"ContainerStarted","Data":"bb99d27f090ac99c2cc655d4b7265c9e633dd94340be09298adbff623f639c66"} Dec 02 16:46:02 crc kubenswrapper[4747]: I1202 16:46:02.713698 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d7l68" podStartSLOduration=3.570301067 podStartE2EDuration="1m33.713677782s" podCreationTimestamp="2025-12-02 16:44:29 +0000 UTC" firstStartedPulling="2025-12-02 16:44:31.813095907 +0000 UTC m=+102.339984656" lastFinishedPulling="2025-12-02 16:46:01.956472622 +0000 UTC m=+192.483361371" observedRunningTime="2025-12-02 16:46:02.710172572 +0000 UTC m=+193.237061341" watchObservedRunningTime="2025-12-02 16:46:02.713677782 +0000 UTC m=+193.240566531" Dec 02 16:46:02 crc kubenswrapper[4747]: I1202 16:46:02.730075 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dhq2f" podStartSLOduration=3.425859197 podStartE2EDuration="1m34.730047068s" podCreationTimestamp="2025-12-02 16:44:28 +0000 UTC" firstStartedPulling="2025-12-02 16:44:30.706752493 +0000 UTC m=+101.233641242" lastFinishedPulling="2025-12-02 
16:46:02.010940344 +0000 UTC m=+192.537829113" observedRunningTime="2025-12-02 16:46:02.725942721 +0000 UTC m=+193.252831470" watchObservedRunningTime="2025-12-02 16:46:02.730047068 +0000 UTC m=+193.256935817" Dec 02 16:46:03 crc kubenswrapper[4747]: I1202 16:46:03.674954 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vsbt4" event={"ID":"362f8e5d-8e91-435a-af79-5d318c2288e0","Type":"ContainerDied","Data":"b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78"} Dec 02 16:46:03 crc kubenswrapper[4747]: I1202 16:46:03.675027 4747 generic.go:334] "Generic (PLEG): container finished" podID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerID="b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78" exitCode=0 Dec 02 16:46:05 crc kubenswrapper[4747]: I1202 16:46:05.691850 4747 generic.go:334] "Generic (PLEG): container finished" podID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerID="ee181b4f372050c11e84339236e022d0477131b5194d65293b232a4505a5d4cb" exitCode=0 Dec 02 16:46:05 crc kubenswrapper[4747]: I1202 16:46:05.691989 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27x57" event={"ID":"36ead3e1-831c-424c-8f1e-4b4213621c9a","Type":"ContainerDied","Data":"ee181b4f372050c11e84339236e022d0477131b5194d65293b232a4505a5d4cb"} Dec 02 16:46:05 crc kubenswrapper[4747]: I1202 16:46:05.696511 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvzvc" event={"ID":"2fad8dbb-9212-436a-bad9-7439f27afec6","Type":"ContainerStarted","Data":"450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a"} Dec 02 16:46:05 crc kubenswrapper[4747]: I1202 16:46:05.908712 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vt692" Dec 02 16:46:05 crc kubenswrapper[4747]: I1202 16:46:05.909328 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vt692" Dec 02 16:46:06 crc kubenswrapper[4747]: I1202 16:46:06.021486 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vt692" Dec 02 16:46:06 crc kubenswrapper[4747]: I1202 16:46:06.706714 4747 generic.go:334] "Generic (PLEG): container finished" podID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerID="450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a" exitCode=0 Dec 02 16:46:06 crc kubenswrapper[4747]: I1202 16:46:06.706817 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvzvc" event={"ID":"2fad8dbb-9212-436a-bad9-7439f27afec6","Type":"ContainerDied","Data":"450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a"} Dec 02 16:46:06 crc kubenswrapper[4747]: I1202 16:46:06.710990 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxrth" event={"ID":"066ad5e3-8cd4-4a73-895d-76671a0d6aa9","Type":"ContainerStarted","Data":"a4ca2f81180147b22fd8bb4d7190e2e2cdacaa72fb5afb97e2928d6cbf452c03"} Dec 02 16:46:06 crc kubenswrapper[4747]: I1202 16:46:06.715636 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vsbt4" event={"ID":"362f8e5d-8e91-435a-af79-5d318c2288e0","Type":"ContainerStarted","Data":"7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042"} Dec 02 16:46:06 crc kubenswrapper[4747]: I1202 16:46:06.781963 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vsbt4" podStartSLOduration=3.13507052 podStartE2EDuration="1m40.771688381s" podCreationTimestamp="2025-12-02 16:44:26 +0000 UTC" firstStartedPulling="2025-12-02 16:44:28.648907761 +0000 UTC m=+99.175796510" lastFinishedPulling="2025-12-02 16:46:06.285525632 +0000 UTC m=+196.812414371" observedRunningTime="2025-12-02 16:46:06.76462527 +0000 UTC m=+197.291514029" watchObservedRunningTime="2025-12-02 16:46:06.771688381 +0000 UTC m=+197.298577120" Dec 02 16:46:06 crc kubenswrapper[4747]: I1202 16:46:06.797410 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vt692" Dec 02 16:46:07 crc kubenswrapper[4747]: I1202 16:46:07.722830 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27x57" event={"ID":"36ead3e1-831c-424c-8f1e-4b4213621c9a","Type":"ContainerStarted","Data":"6df30ec5bdce66e6c2453e92765d6977a3ea191dabbb3565796df086c8a59a83"} Dec 02 16:46:07 crc kubenswrapper[4747]: I1202 16:46:07.724860 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl6rp" event={"ID":"6b230695-281f-405b-98c1-eb2e1470889e","Type":"ContainerStarted","Data":"b15752ba69a141a903f0e8c84c16fe6e1350a212ebfcb37060a935a6f98709f1"} Dec 02 16:46:07 crc kubenswrapper[4747]: I1202 16:46:07.726303 4747 generic.go:334] "Generic (PLEG): container finished" podID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerID="a4ca2f81180147b22fd8bb4d7190e2e2cdacaa72fb5afb97e2928d6cbf452c03" exitCode=0 Dec 02 16:46:07 crc kubenswrapper[4747]: I1202 16:46:07.726519 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxrth" event={"ID":"066ad5e3-8cd4-4a73-895d-76671a0d6aa9","Type":"ContainerDied","Data":"a4ca2f81180147b22fd8bb4d7190e2e2cdacaa72fb5afb97e2928d6cbf452c03"} Dec 02 16:46:07 crc kubenswrapper[4747]: I1202 16:46:07.782882 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-27x57" podStartSLOduration=3.6408573029999998 podStartE2EDuration="1m39.782860577s" podCreationTimestamp="2025-12-02 16:44:28 +0000 UTC" firstStartedPulling="2025-12-02 16:44:30.698128797 +0000 UTC m=+101.225017546" lastFinishedPulling="2025-12-02 16:46:06.840132071 +0000 UTC m=+197.367020820" observedRunningTime="2025-12-02 16:46:07.752162022 +0000 UTC m=+198.279050771" watchObservedRunningTime="2025-12-02 16:46:07.782860577 +0000 UTC m=+198.309749326" Dec 02 16:46:08 crc kubenswrapper[4747]: I1202 16:46:08.361864 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:46:08 crc kubenswrapper[4747]: I1202 16:46:08.361949 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-27x57" Dec 02 16:46:08 crc kubenswrapper[4747]: I1202 16:46:08.735544 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvzvc" event={"ID":"2fad8dbb-9212-436a-bad9-7439f27afec6","Type":"ContainerStarted","Data":"d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878"} Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 16:46:09.249810 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dhq2f" Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 
Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 16:46:09.250047 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 16:46:09.466926 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-27x57" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="registry-server" probeResult="failure" output=<
Dec 02 16:46:09 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s
Dec 02 16:46:09 crc kubenswrapper[4747]: >
Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 16:46:09.743810 4747 generic.go:334] "Generic (PLEG): container finished" podID="6b230695-281f-405b-98c1-eb2e1470889e" containerID="b15752ba69a141a903f0e8c84c16fe6e1350a212ebfcb37060a935a6f98709f1" exitCode=0
Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 16:46:09.743894 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl6rp" event={"ID":"6b230695-281f-405b-98c1-eb2e1470889e","Type":"ContainerDied","Data":"b15752ba69a141a903f0e8c84c16fe6e1350a212ebfcb37060a935a6f98709f1"}
Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 16:46:09.817114 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:46:09 crc kubenswrapper[4747]: I1202 16:46:09.817213 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:46:10 crc kubenswrapper[4747]: I1202 16:46:10.390543 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dhq2f" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="registry-server" probeResult="failure" output=<
Dec 02 16:46:10 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s
Dec 02 16:46:10 crc kubenswrapper[4747]: >
Dec 02 16:46:10 crc kubenswrapper[4747]: I1202 16:46:10.403945 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:46:10 crc kubenswrapper[4747]: I1202 16:46:10.776038 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mvzvc" podStartSLOduration=5.900963359 podStartE2EDuration="1m43.776013277s" podCreationTimestamp="2025-12-02 16:44:27 +0000 UTC" firstStartedPulling="2025-12-02 16:44:29.648119261 +0000 UTC m=+100.175008010" lastFinishedPulling="2025-12-02 16:46:07.523169179 +0000 UTC m=+198.050057928" observedRunningTime="2025-12-02 16:46:10.772467725 +0000 UTC m=+201.299356474" watchObservedRunningTime="2025-12-02 16:46:10.776013277 +0000 UTC m=+201.302902036"
Dec 02 16:46:10 crc kubenswrapper[4747]: I1202 16:46:10.809648 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:46:14 crc kubenswrapper[4747]: I1202 16:46:14.190774 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d7l68"]
Dec 02 16:46:14 crc kubenswrapper[4747]: I1202 16:46:14.192124 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d7l68" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="registry-server" containerID="cri-o://c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761" gracePeriod=2
Dec 02 16:46:16 crc kubenswrapper[4747]: I1202 16:46:16.244920 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 02 16:46:16 crc kubenswrapper[4747]: I1202 16:46:16.383195 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:46:16 crc kubenswrapper[4747]: I1202 16:46:16.383281 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:46:16 crc kubenswrapper[4747]: I1202 16:46:16.429888 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:46:16 crc kubenswrapper[4747]: I1202 16:46:16.854774 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:46:18 crc kubenswrapper[4747]: I1202 16:46:18.040030 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mvzvc"
Dec 02 16:46:18 crc kubenswrapper[4747]: I1202 16:46:18.040473 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mvzvc"
Dec 02 16:46:18 crc kubenswrapper[4747]: I1202 16:46:18.099780 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mvzvc"
Dec 02 16:46:18 crc kubenswrapper[4747]: I1202 16:46:18.406575 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-27x57"
Dec 02 16:46:18 crc kubenswrapper[4747]: I1202 16:46:18.454666 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-27x57"
Dec 02 16:46:18 crc kubenswrapper[4747]: I1202 16:46:18.856218 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mvzvc"
Dec 02 16:46:19 crc kubenswrapper[4747]: I1202 16:46:19.292720 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:46:19 crc kubenswrapper[4747]: I1202 16:46:19.342572 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:46:19 crc kubenswrapper[4747]: I1202 16:46:19.397489 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-27x57"]
Dec 02 16:46:19 crc kubenswrapper[4747]: I1202 16:46:19.808236 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d7l68_788ee83c-7c8c-4e57-a31e-0f658765b846/registry-server/0.log"
Dec 02 16:46:19 crc kubenswrapper[4747]: I1202 16:46:19.809276 4747 generic.go:334] "Generic (PLEG): container finished" podID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerID="c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761" exitCode=137
Dec 02 16:46:19 crc kubenswrapper[4747]: I1202 16:46:19.809406 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d7l68" event={"ID":"788ee83c-7c8c-4e57-a31e-0f658765b846","Type":"ContainerDied","Data":"c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761"}
pod="openshift-marketplace/redhat-marketplace-27x57" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="registry-server" containerID="cri-o://6df30ec5bdce66e6c2453e92765d6977a3ea191dabbb3565796df086c8a59a83" gracePeriod=2 Dec 02 16:46:19 crc kubenswrapper[4747]: E1202 16:46:19.817783 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761 is running failed: container process not found" containerID="c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 16:46:19 crc kubenswrapper[4747]: E1202 16:46:19.818497 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761 is running failed: container process not found" containerID="c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 16:46:19 crc kubenswrapper[4747]: E1202 16:46:19.818800 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761 is running failed: container process not found" containerID="c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 16:46:19 crc kubenswrapper[4747]: E1202 16:46:19.818849 4747 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-d7l68" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="registry-server" Dec 02 16:46:22 crc kubenswrapper[4747]: I1202 16:46:22.836025 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxrth" event={"ID":"066ad5e3-8cd4-4a73-895d-76671a0d6aa9","Type":"ContainerStarted","Data":"fc37487d8a08e22fc213e62713c455c4fa35508916ab38602ec077a16fc45a59"} Dec 02 16:46:22 crc kubenswrapper[4747]: I1202 16:46:22.847641 4747 generic.go:334] "Generic (PLEG): container finished" podID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerID="6df30ec5bdce66e6c2453e92765d6977a3ea191dabbb3565796df086c8a59a83" exitCode=0 Dec 02 16:46:22 crc kubenswrapper[4747]: I1202 16:46:22.847750 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27x57" event={"ID":"36ead3e1-831c-424c-8f1e-4b4213621c9a","Type":"ContainerDied","Data":"6df30ec5bdce66e6c2453e92765d6977a3ea191dabbb3565796df086c8a59a83"} Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.048135 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d7l68_788ee83c-7c8c-4e57-a31e-0f658765b846/registry-server/0.log" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.049949 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d7l68" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.110212 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftqx7\" (UniqueName: \"kubernetes.io/projected/788ee83c-7c8c-4e57-a31e-0f658765b846-kube-api-access-ftqx7\") pod \"788ee83c-7c8c-4e57-a31e-0f658765b846\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.110309 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-utilities\") pod \"788ee83c-7c8c-4e57-a31e-0f658765b846\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.110394 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-catalog-content\") pod \"788ee83c-7c8c-4e57-a31e-0f658765b846\" (UID: \"788ee83c-7c8c-4e57-a31e-0f658765b846\") " Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.113395 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-utilities" (OuterVolumeSpecName: "utilities") pod "788ee83c-7c8c-4e57-a31e-0f658765b846" (UID: "788ee83c-7c8c-4e57-a31e-0f658765b846"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.120246 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788ee83c-7c8c-4e57-a31e-0f658765b846-kube-api-access-ftqx7" (OuterVolumeSpecName: "kube-api-access-ftqx7") pod "788ee83c-7c8c-4e57-a31e-0f658765b846" (UID: "788ee83c-7c8c-4e57-a31e-0f658765b846"). InnerVolumeSpecName "kube-api-access-ftqx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.213035 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftqx7\" (UniqueName: \"kubernetes.io/projected/788ee83c-7c8c-4e57-a31e-0f658765b846-kube-api-access-ftqx7\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.213079 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.241852 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "788ee83c-7c8c-4e57-a31e-0f658765b846" (UID: "788ee83c-7c8c-4e57-a31e-0f658765b846"). InnerVolumeSpecName "catalog-content". 
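[Editor's note] redhat-operators-d7l68 was killed with gracePeriod=2 and its container later reports exitCode=137, i.e. 128+9 (SIGKILL): it did not exit within the grace window. A minimal Go sketch of that SIGTERM-then-SIGKILL escalation, illustrative only and not CRI-O's actual implementation:

package main

import (
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace mimics the stop sequence behind "Killing container with a
// grace period": SIGTERM first, then SIGKILL once the grace period lapses,
// which is what an exit code of 137 indicates.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	cmd.Process.Signal(syscall.SIGTERM) // polite request first
	select {
	case <-done: // exited within the grace period
	case <-time.After(grace): // grace expired: force kill
		cmd.Process.Kill() // delivers SIGKILL on Unix
		<-done
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	cmd.Start()
	stopWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the d7l68 kill above
}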
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.313642 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788ee83c-7c8c-4e57-a31e-0f658765b846-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.689152 4747 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.689480 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86" gracePeriod=15 Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.689619 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22" gracePeriod=15 Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.689690 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208" gracePeriod=15 Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.689657 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88" gracePeriod=15 Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.689733 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf" gracePeriod=15 Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691178 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691422 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691439 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691450 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691458 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691465 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691470 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691483 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="extract-utilities" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691488 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="extract-utilities" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691501 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691506 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691514 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2300c9ec-034d-4103-8a50-a9c5c507f1ad" containerName="collect-profiles" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691520 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2300c9ec-034d-4103-8a50-a9c5c507f1ad" containerName="collect-profiles" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691528 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691534 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691542 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691548 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691556 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="registry-server" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691562 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="registry-server" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691569 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88a302d9-a421-4b30-867a-60b3258553d5" containerName="pruner" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691575 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="88a302d9-a421-4b30-867a-60b3258553d5" containerName="pruner" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691582 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691589 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 16:46:23 crc kubenswrapper[4747]: E1202 16:46:23.691599 4747 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="extract-content" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691605 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="extract-content" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691719 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691734 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691743 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691752 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691760 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2300c9ec-034d-4103-8a50-a9c5c507f1ad" containerName="collect-profiles" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691767 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" containerName="registry-server" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691777 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691784 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="88a302d9-a421-4b30-867a-60b3258553d5" containerName="pruner" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.691991 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.693111 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.694080 4747 util.go:30] "No sandbox for pod can be found. 
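[Editor's note] The SyncLoop REMOVE/ADD pair with source="file" above is the kubelet reacting to a changed static-pod manifest on disk: the old kube-apiserver containers are killed with gracePeriod=15 and the pod is re-added under a new UID. A sketch of directory watching with fsnotify as an illustrative stand-in; the kubelet's real file source and the exact path on this host are assumptions here:

package main

import (
	"log"

	"github.com/fsnotify/fsnotify"
)

func main() {
	w, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer w.Close()
	// /etc/kubernetes/manifests is the conventional static-pod directory; the
	// actual location used by this node is not visible in the log.
	if err := w.Add("/etc/kubernetes/manifests"); err != nil {
		log.Fatal(err)
	}
	for ev := range w.Events {
		// A rewritten kube-apiserver manifest would surface here; the kubelet
		// then kills the old containers and re-adds the pod under a new UID.
		log.Printf("manifest event: %s %s", ev.Op, ev.Name)
	}
}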
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.694080 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.701873 4747 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.720091 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.720222 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.720313 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.734849 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821589 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821653 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821676 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821694 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821734 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821758 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821781 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821796 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821900 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821957 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.821982 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.857780 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d7l68_788ee83c-7c8c-4e57-a31e-0f658765b846/registry-server/0.log"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.858668 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d7l68" event={"ID":"788ee83c-7c8c-4e57-a31e-0f658765b846","Type":"ContainerDied","Data":"fa340af51b029a392735fa527154f9b56ef003c75e212923043f31cc5fe2f8af"}
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.858724 4747 scope.go:117] "RemoveContainer" containerID="c89bbbf09e837fdd3da49caee7d774b0c0540926bf70957856ca29c6d66e3761"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.858918 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d7l68"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.860808 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.861120 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.867106 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.868197 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.885896 4747 scope.go:117] "RemoveContainer" containerID="13a8875dfe38affa183c5179fbb28b885beb50fade5cef7764020e96212eb341"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.904661 4747 scope.go:117] "RemoveContainer" containerID="b057fb7deaf7c7ecac1dc423aabc999ac8f8eed4bffcf9c6650ad84fd6a088a8"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923071 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923096 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923137 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923169 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923199 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923249 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923248 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:23 crc kubenswrapper[4747]: I1202 16:46:23.923276 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:24 crc kubenswrapper[4747]: I1202 16:46:24.034427 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:46:24 crc kubenswrapper[4747]: W1202 16:46:24.056452 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-7b74c293858e44e894c06d48a0c0e95fe3c2bdda3e24412a0f3267b3f53f043e WatchSource:0}: Error finding container 7b74c293858e44e894c06d48a0c0e95fe3c2bdda3e24412a0f3267b3f53f043e: Status 404 returned error can't find the container with id 7b74c293858e44e894c06d48a0c0e95fe3c2bdda3e24412a0f3267b3f53f043e
Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.059205 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d73d42916407f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 16:46:24.058687615 +0000 UTC m=+214.585576364,LastTimestamp:2025-12-02 16:46:24.058687615 +0000 UTC m=+214.585576364,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.710704 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:24Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:24Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:24Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:24Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:6adbf9c899ee2ad98ce67a6c0ff13492e290ae78ed6f1016e0b15309abcd1988\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:a5e5e73303355d352c60d32ff983f5cd488878dc7249cb03949ec520cf99f02b\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1608275786},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1f2a98779239dfafa38d3fb89250a2691f75894c155b5c43fcc421a653bf9273\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:6a549becfb2bf10c272884c5858c442eeaa5b3eb8a726dc460b0a79d0164f7ed\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1204220237},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:3bb6e76bb2fc875de6aae6909205aad0af8b2a476f3b7e31f64d5ae8e6659572\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:54a0b5857af1053fc62860dff0f0cb8f974ab781ba9fc5722277c34ef2a16b4e\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201277260},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused"
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.712830 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.713607 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.714005 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.714118 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.718694 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.719185 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.719687 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.720330 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.720997 4747 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: I1202 16:46:24.721047 4747 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.721926 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="200ms" Dec 02 16:46:24 crc kubenswrapper[4747]: I1202 16:46:24.868174 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7b74c293858e44e894c06d48a0c0e95fe3c2bdda3e24412a0f3267b3f53f043e"} Dec 02 16:46:24 crc 
kubenswrapper[4747]: I1202 16:46:24.868445 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: I1202 16:46:24.868783 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: I1202 16:46:24.869260 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:24 crc kubenswrapper[4747]: E1202 16:46:24.922991 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="400ms" Dec 02 16:46:25 crc kubenswrapper[4747]: E1202 16:46:25.324580 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="800ms" Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.712944 4747 util.go:48] "No ready sandbox for pod can be found. 
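[Editor's note] The "Failed to ensure lease exists, will retry" entries show the retry interval doubling: 200ms, then 400ms, then 800ms, after the fixed five-attempt lease update loop gave up ("failed 5 attempts to update lease"). A minimal Go sketch of that doubling backoff; the cap is an assumption for illustration, only the 200ms start and the doubling are from the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	interval := 200 * time.Millisecond // first retry interval from the log
	maxInterval := 7 * time.Second     // assumed cap, for illustration only
	for attempt := 1; attempt <= 5; attempt++ {
		fmt.Printf("attempt %d failed, next retry in %s\n", attempt, interval)
		interval *= 2
		if interval > maxInterval {
			interval = maxInterval
		}
	}
}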
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.712944 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27x57"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.714257 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.714953 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.715555 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.715996 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.858080 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-utilities\") pod \"36ead3e1-831c-424c-8f1e-4b4213621c9a\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") "
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.858488 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnj5h\" (UniqueName: \"kubernetes.io/projected/36ead3e1-831c-424c-8f1e-4b4213621c9a-kube-api-access-gnj5h\") pod \"36ead3e1-831c-424c-8f1e-4b4213621c9a\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") "
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.858527 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-catalog-content\") pod \"36ead3e1-831c-424c-8f1e-4b4213621c9a\" (UID: \"36ead3e1-831c-424c-8f1e-4b4213621c9a\") "
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.861371 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-utilities" (OuterVolumeSpecName: "utilities") pod "36ead3e1-831c-424c-8f1e-4b4213621c9a" (UID: "36ead3e1-831c-424c-8f1e-4b4213621c9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.888110 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "36ead3e1-831c-424c-8f1e-4b4213621c9a" (UID: "36ead3e1-831c-424c-8f1e-4b4213621c9a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.889654 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36ead3e1-831c-424c-8f1e-4b4213621c9a-kube-api-access-gnj5h" (OuterVolumeSpecName: "kube-api-access-gnj5h") pod "36ead3e1-831c-424c-8f1e-4b4213621c9a" (UID: "36ead3e1-831c-424c-8f1e-4b4213621c9a"). InnerVolumeSpecName "kube-api-access-gnj5h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.892408 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.902161 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.903844 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88" exitCode=0
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.903898 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22" exitCode=0
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.904130 4747 scope.go:117] "RemoveContainer" containerID="93152fc97324223bad2c24f31f578d2e2d4f8f09ed7389aebf9bbbc224f65ab5"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.904151 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208" exitCode=0
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.904167 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf" exitCode=2
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.908127 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b"}
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.910808 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.911320 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.911552 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.911793 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.915539 4747 generic.go:334] "Generic (PLEG): container finished" podID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" containerID="1ea0b7df0c4b962e494968382928ff445015efdc1989b335af141e77cf05f7c1" exitCode=0
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.915661 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f977bba4-b1bf-4915-bb71-71a4dd413d9b","Type":"ContainerDied","Data":"1ea0b7df0c4b962e494968382928ff445015efdc1989b335af141e77cf05f7c1"}
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.916852 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.917358 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.917555 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.917711 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.917892 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.926563 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-27x57" event={"ID":"36ead3e1-831c-424c-8f1e-4b4213621c9a","Type":"ContainerDied","Data":"3506ac0ddd3fdab431dc69166d5d848a96db34e552e69727450c6c2010d8c98b"}
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.926729 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-27x57"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.928365 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.930402 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.932278 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.934208 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.934625 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.957898 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.959190 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.960076 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused"
Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.960438 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a"
pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.960883 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.960969 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnj5h\" (UniqueName: \"kubernetes.io/projected/36ead3e1-831c-424c-8f1e-4b4213621c9a-kube-api-access-gnj5h\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.960989 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36ead3e1-831c-424c-8f1e-4b4213621c9a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:25 crc kubenswrapper[4747]: I1202 16:46:25.960889 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:26 crc kubenswrapper[4747]: E1202 16:46:26.125930 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="1.6s" Dec 02 16:46:26 crc kubenswrapper[4747]: E1202 16:46:26.594711 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d73d42916407f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 16:46:24.058687615 +0000 UTC m=+214.585576364,LastTimestamp:2025-12-02 16:46:24.058687615 +0000 UTC m=+214.585576364,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.630869 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rxrth" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.630963 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rxrth" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.651578 4747 scope.go:117] "RemoveContainer" containerID="6df30ec5bdce66e6c2453e92765d6977a3ea191dabbb3565796df086c8a59a83" Dec 02 16:46:26 crc 
kubenswrapper[4747]: I1202 16:46:26.672731 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rxrth" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.673401 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.674015 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.674925 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.675307 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.675656 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.936069 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.938169 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86" exitCode=0 Dec 02 16:46:26 crc kubenswrapper[4747]: I1202 16:46:26.981120 4747 scope.go:117] "RemoveContainer" containerID="ee181b4f372050c11e84339236e022d0477131b5194d65293b232a4505a5d4cb" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.007088 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rxrth" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.007802 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.009102 4747 status_manager.go:851] "Failed to get status for pod" 
podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.009592 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.012105 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.012303 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.027820 4747 scope.go:117] "RemoveContainer" containerID="a2c4ac71dc25c16e8d5b255f73349c26c241ea3459093e5c9a4bd1d99b760765" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.320032 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.320768 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.321098 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.321378 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.321634 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.321980 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.331222 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.332453 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.333131 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.334071 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.334387 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.334741 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.335073 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.335578 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.483817 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kube-api-access\") pod \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484265 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-var-lock\") pod \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484318 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484330 4747 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-var-lock" (OuterVolumeSpecName: "var-lock") pod "f977bba4-b1bf-4915-bb71-71a4dd413d9b" (UID: "f977bba4-b1bf-4915-bb71-71a4dd413d9b"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484354 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kubelet-dir\") pod \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\" (UID: \"f977bba4-b1bf-4915-bb71-71a4dd413d9b\") " Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484394 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484416 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484461 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484452 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484464 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f977bba4-b1bf-4915-bb71-71a4dd413d9b" (UID: "f977bba4-b1bf-4915-bb71-71a4dd413d9b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.484528 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.485006 4747 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.485025 4747 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.485038 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.485050 4747 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.485061 4747 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.491844 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f977bba4-b1bf-4915-bb71-71a4dd413d9b" (UID: "f977bba4-b1bf-4915-bb71-71a4dd413d9b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.585917 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f977bba4-b1bf-4915-bb71-71a4dd413d9b-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 16:46:27 crc kubenswrapper[4747]: E1202 16:46:27.727895 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="3.2s" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.768221 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.950199 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.951523 4747 scope.go:117] "RemoveContainer" containerID="52309aeb87292e8a2a85ca64f8668455fb9ff4f1b2342cb4f761e43744f12c88" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.951566 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.953016 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.953431 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.954184 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.955108 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.955686 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.955811 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl6rp" event={"ID":"6b230695-281f-405b-98c1-eb2e1470889e","Type":"ContainerStarted","Data":"55861e680b4fb7d38b22b39cd5542da2a9e35d4c991f41e0e9c6f4a66390a1b1"} Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.955996 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.956401 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.956688 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection 
refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.957025 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.957364 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.957698 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.958026 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.958307 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.958738 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.959069 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.959150 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.959075 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f977bba4-b1bf-4915-bb71-71a4dd413d9b","Type":"ContainerDied","Data":"43d177fc68716b952919c221c1239a9084b6c6ec8e478ce6093d74560ef12faa"} Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.959229 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43d177fc68716b952919c221c1239a9084b6c6ec8e478ce6093d74560ef12faa" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.959505 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.960091 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.960386 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.960606 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.960768 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.962485 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.962708 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.962882 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.963280 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.963866 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.964266 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.964562 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.971421 4747 scope.go:117] "RemoveContainer" containerID="18379f84567a607da841ec38dbe3ebf2225792d4e659edebe36b69924d075c22" Dec 02 16:46:27 crc kubenswrapper[4747]: I1202 16:46:27.988030 4747 scope.go:117] "RemoveContainer" containerID="ddf466e787dd2e3171a1a19b4bd4028f6d415f80ade05a3e67a12fdeedfda208" Dec 02 16:46:28 crc kubenswrapper[4747]: I1202 16:46:28.006978 4747 scope.go:117] "RemoveContainer" containerID="e146033ea1558ffad22dfaba6061fb033f8ad5410400d3bc6db6104fa9612ccf" Dec 02 16:46:28 crc kubenswrapper[4747]: I1202 16:46:28.049302 4747 scope.go:117] "RemoveContainer" containerID="639e6c9db5160a872041a433fec442aee7a52dfdd66c7b746d4e3dbd3da21b86" Dec 02 16:46:28 crc kubenswrapper[4747]: I1202 16:46:28.083502 4747 scope.go:117] "RemoveContainer" containerID="39d82b0ee65fcd45383ffed4bdf79127c8a94ef27496430a339f99a570dafc75" Dec 02 16:46:29 crc kubenswrapper[4747]: I1202 16:46:29.763387 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:29 crc kubenswrapper[4747]: I1202 16:46:29.764668 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:29 crc kubenswrapper[4747]: 
I1202 16:46:29.765261 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:29 crc kubenswrapper[4747]: I1202 16:46:29.765536 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:29 crc kubenswrapper[4747]: I1202 16:46:29.765787 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:29 crc kubenswrapper[4747]: I1202 16:46:29.767234 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:29 crc kubenswrapper[4747]: I1202 16:46:29.768667 4747 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:30 crc kubenswrapper[4747]: E1202 16:46:30.929514 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="6.4s" Dec 02 16:46:31 crc kubenswrapper[4747]: I1202 16:46:31.796407 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:46:31 crc kubenswrapper[4747]: I1202 16:46:31.796806 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:46:31 crc kubenswrapper[4747]: I1202 16:46:31.796875 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:46:31 crc kubenswrapper[4747]: I1202 16:46:31.797630 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa"} 
pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 16:46:31 crc kubenswrapper[4747]: I1202 16:46:31.797770 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa" gracePeriod=600 Dec 02 16:46:32 crc kubenswrapper[4747]: I1202 16:46:32.989117 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa" exitCode=0 Dec 02 16:46:32 crc kubenswrapper[4747]: I1202 16:46:32.989200 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa"} Dec 02 16:46:35 crc kubenswrapper[4747]: E1202 16:46:35.103869 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:35Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:35Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:35Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T16:46:35Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:6adbf9c899ee2ad98ce67a6c0ff13492e290ae78ed6f1016e0b15309abcd1988\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:a5e5e73303355d352c60d32ff983f5cd488878dc7249cb03949ec520cf99f02b\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1608275786},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1f2a98779239dfafa38d3fb89250a2691f75894c155b5c43fcc421a653bf9273\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:6a549becfb2bf10c272884c5858c442eeaa5b3eb8a726dc460b0a79d0164f7ed\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1204220237},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:3bb6e76bb2fc875de6aae6909205aad0af8b2a476f3b7e31f64d5ae8e6659572\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:54a0b5857af
1053fc62860dff0f0cb8f974ab781ba9fc5722277c34ef2a16b4e\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1201277260},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:e8990432556acad31519b1a73ec32f32d27c2034cf9e5cc4db8980efc7331594\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:ebe9f523f5c211a3a0f2570331dddcd5be15b12c1fecd9b8b121f881bfaad029\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1129027903},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24
af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:35 crc kubenswrapper[4747]: E1202 16:46:35.105508 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:35 crc kubenswrapper[4747]: E1202 16:46:35.105997 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:35 crc kubenswrapper[4747]: E1202 16:46:35.106329 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:35 crc kubenswrapper[4747]: E1202 16:46:35.106591 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:35 crc kubenswrapper[4747]: E1202 16:46:35.106616 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.323612 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jl6rp" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.324038 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jl6rp" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.378201 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jl6rp" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.379219 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.379969 4747 status_manager.go:851] "Failed to get status for pod" 
podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.380548 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.380959 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.381293 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:36 crc kubenswrapper[4747]: I1202 16:46:36.381605 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:36 crc kubenswrapper[4747]: E1202 16:46:36.595974 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.201:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d73d42916407f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 16:46:24.058687615 +0000 UTC m=+214.585576364,LastTimestamp:2025-12-02 16:46:24.058687615 +0000 UTC m=+214.585576364,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.014699 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"91e508de3c4af0f147fbc7e05387ad2564a081878e3dc816210246c7720f252a"} Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.060458 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/certified-operators-jl6rp" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.061181 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.061566 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.062163 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.062457 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.062718 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.063067 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: E1202 16:46:37.330745 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.201:6443: connect: connection refused" interval="7s" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.760326 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.762033 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.763044 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.763700 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.764376 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.764854 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.765380 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.783125 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.783172 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:37 crc kubenswrapper[4747]: E1202 16:46:37.783998 4747 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:37 crc kubenswrapper[4747]: I1202 16:46:37.784937 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.025957 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"73b8effe6a12c9cc3e8acd86f95dce40cceed872409f19fb5991bbd39f5d3665"} Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.028883 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.028967 4747 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e" exitCode=1 Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.029055 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e"} Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.029814 4747 scope.go:117] "RemoveContainer" containerID="783e3e060b45684a431a85fc31bc0fc40435fe10a3b8ffbdafb0c256b2cdb21e" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.031012 4747 status_manager.go:851] "Failed to get status for pod" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-m5zcc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.031394 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.031720 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.032331 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.032663 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.032975 4747 status_manager.go:851] "Failed to get status for 
pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.033280 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.033564 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.034079 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.034358 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.034660 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.034977 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.035311 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.035612 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.035927 4747 
status_manager.go:851] "Failed to get status for pod" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-m5zcc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.036236 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:38 crc kubenswrapper[4747]: I1202 16:46:38.579937 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.036863 4747 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="80566c526560a47108d171eb6202d4de6c5d3fe740c60939a5f88074f5755034" exitCode=0 Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.036945 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"80566c526560a47108d171eb6202d4de6c5d3fe740c60939a5f88074f5755034"} Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.037260 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.037295 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:39 crc kubenswrapper[4747]: E1202 16:46:39.037941 4747 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.037951 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.038326 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.038659 4747 status_manager.go:851] "Failed to get status for pod" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-m5zcc\": dial tcp 38.102.83.201:6443: connect: connection refused" 
Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.040128 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.040454 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.040742 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.041021 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.041279 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.043974 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.044050 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7c0f4e75264a429fe4b737514cec06a15b14e59e18ef25f1cde4c7a34f4d9558"} Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.044891 4747 status_manager.go:851] "Failed to get status for pod" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.045489 4747 status_manager.go:851] "Failed to get status for pod" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" pod="openshift-marketplace/community-operators-rxrth" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-rxrth\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.045808 4747 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.046246 4747 status_manager.go:851] "Failed to get status for pod" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-m5zcc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.046737 4747 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.047115 4747 status_manager.go:851] "Failed to get status for pod" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" pod="openshift-marketplace/redhat-marketplace-27x57" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-27x57\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.047466 4747 status_manager.go:851] "Failed to get status for pod" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" pod="openshift-marketplace/redhat-operators-d7l68" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-d7l68\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:39 crc kubenswrapper[4747]: I1202 16:46:39.047795 4747 status_manager.go:851] "Failed to get status for pod" podUID="6b230695-281f-405b-98c1-eb2e1470889e" pod="openshift-marketplace/certified-operators-jl6rp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-jl6rp\": dial tcp 38.102.83.201:6443: connect: connection refused" Dec 02 16:46:40 crc kubenswrapper[4747]: I1202 16:46:40.048558 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:46:40 crc kubenswrapper[4747]: I1202 16:46:40.048929 4747 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 02 16:46:40 crc kubenswrapper[4747]: I1202 16:46:40.049548 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 02 16:46:40 crc kubenswrapper[4747]: I1202 16:46:40.056700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4ea41c6b13de8889f5618a2e789afa7fce90ff871d91f97fda198674f85e3538"} Dec 02 16:46:40 crc kubenswrapper[4747]: I1202 16:46:40.056747 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"afdbeb83f1f1e2f058ac45f5843baba3a3e4265a6ecc9b7def3b75f6e099952a"} Dec 02 16:46:40 crc kubenswrapper[4747]: I1202 16:46:40.056761 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"886fa23acd40d3d255c40c8a76cc8fa4a41bc1099792ae209cc7d922c33787d8"} Dec 02 16:46:40 crc kubenswrapper[4747]: I1202 16:46:40.382998 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:46:41 crc kubenswrapper[4747]: I1202 16:46:41.066188 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d6afd2422181deb8d738220be6021bbca27b5bd731e4794a235735b11802ed74"} Dec 02 16:46:41 crc kubenswrapper[4747]: I1202 16:46:41.066551 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b4473d4723cad19d79cf49e47d3f56248fa424e3d4581cbd8a1f0e16c6d03e99"} Dec 02 16:46:41 crc kubenswrapper[4747]: I1202 16:46:41.066722 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:41 crc kubenswrapper[4747]: I1202 16:46:41.066767 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:42 crc kubenswrapper[4747]: I1202 16:46:42.785858 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:42 crc kubenswrapper[4747]: I1202 16:46:42.785979 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:42 crc kubenswrapper[4747]: I1202 16:46:42.792921 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:46 crc kubenswrapper[4747]: I1202 16:46:46.079832 4747 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:47 crc kubenswrapper[4747]: I1202 16:46:47.100713 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 16:46:47 crc kubenswrapper[4747]: I1202 16:46:47.100773 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:47 crc kubenswrapper[4747]: I1202 16:46:47.101328 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:47 crc kubenswrapper[4747]: I1202 16:46:47.105467 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 
16:46:47 crc kubenswrapper[4747]: I1202 16:46:47.108507 4747 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="80bb35c0-7356-4589-a963-b6bd8c3c0771" Dec 02 16:46:48 crc kubenswrapper[4747]: I1202 16:46:48.105127 4747 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:48 crc kubenswrapper[4747]: I1202 16:46:48.105161 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="44dd404f-95d0-4464-ac0f-ead0da9a909c" Dec 02 16:46:49 crc kubenswrapper[4747]: I1202 16:46:49.787764 4747 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="80bb35c0-7356-4589-a963-b6bd8c3c0771" Dec 02 16:46:50 crc kubenswrapper[4747]: I1202 16:46:50.054751 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:46:50 crc kubenswrapper[4747]: I1202 16:46:50.059338 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 16:46:55 crc kubenswrapper[4747]: I1202 16:46:55.476732 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 16:46:56 crc kubenswrapper[4747]: I1202 16:46:56.069472 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 16:46:56 crc kubenswrapper[4747]: I1202 16:46:56.363654 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 16:46:56 crc kubenswrapper[4747]: I1202 16:46:56.617957 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 16:46:56 crc kubenswrapper[4747]: I1202 16:46:56.794176 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 16:46:56 crc kubenswrapper[4747]: I1202 16:46:56.895153 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 16:46:57 crc kubenswrapper[4747]: I1202 16:46:57.071704 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 16:46:57 crc kubenswrapper[4747]: I1202 16:46:57.329408 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 16:46:57 crc kubenswrapper[4747]: I1202 16:46:57.442312 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 16:46:57 crc kubenswrapper[4747]: I1202 16:46:57.574963 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 16:46:57 crc kubenswrapper[4747]: I1202 16:46:57.749988 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 16:46:57 crc kubenswrapper[4747]: I1202 16:46:57.772957 4747 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 16:46:57 crc kubenswrapper[4747]: I1202 16:46:57.961605 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.079843 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.108270 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.153368 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.258550 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.285013 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.308621 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.312207 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.378932 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.620618 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.731637 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.800013 4747 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.846811 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.900520 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 16:46:58 crc kubenswrapper[4747]: I1202 16:46:58.981939 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.112972 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.150754 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.151272 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.160733 4747 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"config" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.181362 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.247614 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.303802 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.364445 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.372143 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.464934 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.528651 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.612869 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.616038 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.634932 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.641448 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.809646 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.853257 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 16:46:59 crc kubenswrapper[4747]: I1202 16:46:59.957891 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.153654 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.154722 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.202558 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.261367 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.290863 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 
02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.393189 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.578129 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.591999 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.726694 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.743544 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 16:47:00 crc kubenswrapper[4747]: I1202 16:47:00.817353 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.037078 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.060740 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.099174 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.110210 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.128519 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.128947 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.135200 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.152389 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.289952 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.313398 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.407953 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.420570 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.614423 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 
16:47:01.694116 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.719039 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.730508 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.764237 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.783704 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.805390 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.836947 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.910559 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 16:47:01 crc kubenswrapper[4747]: I1202 16:47:01.938674 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.042625 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.083259 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.112452 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.155554 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.214337 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.303755 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.304664 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.346276 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.363209 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.471571 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 
16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.539827 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.548175 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.568220 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.644770 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.708134 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.723049 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.744022 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.753048 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.770147 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.847191 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.888361 4747 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.908073 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.940440 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 16:47:02 crc kubenswrapper[4747]: I1202 16:47:02.980336 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.008215 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.110756 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.138123 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.166973 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.469176 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 16:47:03 crc kubenswrapper[4747]: 
I1202 16:47:03.543440 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.543651 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.596245 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.626158 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.683997 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.684358 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.691583 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.815471 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.821997 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.849719 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.857280 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.906328 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 16:47:03 crc kubenswrapper[4747]: I1202 16:47:03.937076 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.007396 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.047427 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.106801 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.130652 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.163148 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.186606 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 
16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.190983 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.321116 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.328221 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.526072 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.571141 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.613617 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.614490 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.614713 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.651528 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.691513 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.711370 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.712653 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.780805 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.781989 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.790183 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.807232 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.892395 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.903858 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.932149 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.941391 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.970948 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 02 16:47:04 crc kubenswrapper[4747]: I1202 16:47:04.986431 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.155127 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.173714 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.234450 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.251158 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.262463 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.357996 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.499358 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.666790 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.791335 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.851007 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.859351 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.915399 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.949718 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 02 16:47:05 crc kubenswrapper[4747]: I1202 16:47:05.986008 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.015135 4747 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.116940 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.126307 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.182352 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.437626 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.512842 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.561757 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.680676 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.718737 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.766162 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.895711 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.898792 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 02 16:47:06 crc kubenswrapper[4747]: I1202 16:47:06.985181 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.160256 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.171458 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.195627 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.218235 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.276244 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.282815 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.309946 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.311271 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.327220 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.363695 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.378401 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.394815 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.405377 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.431478 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.457179 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.515815 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.521521 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.525976 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.557243 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.690955 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.707320 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.765715 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.786732 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.807826 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.832333 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 02 16:47:07 crc kubenswrapper[4747]: I1202 16:47:07.984325 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.000688 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.012886 4747 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.014354 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jl6rp" podStartSLOduration=44.6257501 podStartE2EDuration="2m43.01433794s" podCreationTimestamp="2025-12-02 16:44:25 +0000 UTC" firstStartedPulling="2025-12-02 16:44:28.593149599 +0000 UTC m=+99.120038348" lastFinishedPulling="2025-12-02 16:46:26.981737439 +0000 UTC m=+217.508626188" observedRunningTime="2025-12-02 16:46:45.837712347 +0000 UTC m=+236.364601096" watchObservedRunningTime="2025-12-02 16:47:08.01433794 +0000 UTC m=+258.541226689"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.014620 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=45.014615388 podStartE2EDuration="45.014615388s" podCreationTimestamp="2025-12-02 16:46:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:46:45.74833618 +0000 UTC m=+236.275224939" watchObservedRunningTime="2025-12-02 16:47:08.014615388 +0000 UTC m=+258.541504137"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.016296 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rxrth" podStartSLOduration=53.329305423 podStartE2EDuration="2m42.016292436s" podCreationTimestamp="2025-12-02 16:44:26 +0000 UTC" firstStartedPulling="2025-12-02 16:44:28.619120588 +0000 UTC m=+99.146009347" lastFinishedPulling="2025-12-02 16:46:17.306107611 +0000 UTC m=+207.832996360" observedRunningTime="2025-12-02 16:46:45.87156833 +0000 UTC m=+236.398457079" watchObservedRunningTime="2025-12-02 16:47:08.016292436 +0000 UTC m=+258.543181185"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.017998 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/redhat-operators-d7l68","openshift-marketplace/redhat-marketplace-27x57"]
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.018053 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.022221 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.049250 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.049203401 podStartE2EDuration="22.049203401s" podCreationTimestamp="2025-12-02 16:46:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:47:08.041047797 +0000 UTC m=+258.567936546" watchObservedRunningTime="2025-12-02 16:47:08.049203401 +0000 UTC m=+258.576092150"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.175724 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.175724 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.179276 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.283139 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.296172 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.318347 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.430226 4747 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.430476 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b" gracePeriod=5
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.440155 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.486643 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.502109 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.698627 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.729875 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.785502 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.798031 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 02 16:47:08 crc kubenswrapper[4747]: I1202 16:47:08.848770 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.119751 4747 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.288424 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.350802 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.445083 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.458380 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.519059 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.529818 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.573519 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.584898 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.637757 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.768045 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.769243 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" path="/var/lib/kubelet/pods/36ead3e1-831c-424c-8f1e-4b4213621c9a/volumes"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.770487 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="788ee83c-7c8c-4e57-a31e-0f658765b846" path="/var/lib/kubelet/pods/788ee83c-7c8c-4e57-a31e-0f658765b846/volumes"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.837041 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.868042 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.876561 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 02 16:47:09 crc kubenswrapper[4747]: I1202 16:47:09.925738 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.006962 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.058182 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.059338 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.141963 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.333208 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.730996 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.821518 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.840453 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.858479 4747 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 02 16:47:10 crc kubenswrapper[4747]: I1202 16:47:10.933840 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 02 16:47:11 crc kubenswrapper[4747]: I1202 16:47:11.178038 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 02 16:47:11 crc kubenswrapper[4747]: I1202 16:47:11.222814 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 02 16:47:11 crc kubenswrapper[4747]: I1202 16:47:11.542699 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 02 16:47:11 crc kubenswrapper[4747]: I1202 16:47:11.601017 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 02 16:47:11 crc kubenswrapper[4747]: I1202 16:47:11.660081 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 02 16:47:12 crc kubenswrapper[4747]: I1202 16:47:12.206301 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 02 16:47:13 crc kubenswrapper[4747]: I1202 16:47:13.033126 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.046052 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.046140 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.159600 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.159754 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.159820 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.159941 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.159983 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.160187 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.160306 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.160191 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.160240 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.170567 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.262061 4747 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.262390 4747 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.262435 4747 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.262445 4747 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.262454 4747 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.271064 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.271135 4747 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b" exitCode=137
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.271200 4747 scope.go:117] "RemoveContainer" containerID="fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b"
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.271338 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.290266 4747 scope.go:117] "RemoveContainer" containerID="fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b"
Dec 02 16:47:14 crc kubenswrapper[4747]: E1202 16:47:14.290825 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b\": container with ID starting with fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b not found: ID does not exist" containerID="fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b"
Dec 02 16:47:14 crc kubenswrapper[4747]: I1202 16:47:14.290996 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b"} err="failed to get container status \"fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b\": rpc error: code = NotFound desc = could not find container \"fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b\": container with ID starting with fe7ac99f1362c116558f4df2657ec8e50f57894dc5ce50f96e4cbd2d861f194b not found: ID does not exist"
Dec 02 16:47:15 crc kubenswrapper[4747]: I1202 16:47:15.768791 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 02 16:47:15 crc kubenswrapper[4747]: I1202 16:47:15.769156 4747 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 02 16:47:15 crc kubenswrapper[4747]: I1202 16:47:15.781958 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 02 16:47:15 crc kubenswrapper[4747]: I1202 16:47:15.782089 4747 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="930a0fff-09e4-417a-b60c-195342ec9d87"
Dec 02 16:47:15 crc kubenswrapper[4747]: I1202 16:47:15.785327 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 02 16:47:15 crc kubenswrapper[4747]: I1202 16:47:15.785397 4747 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="930a0fff-09e4-417a-b60c-195342ec9d87"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.593570 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jl6rp"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.596631 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jl6rp" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="registry-server" containerID="cri-o://55861e680b4fb7d38b22b39cd5542da2a9e35d4c991f41e0e9c6f4a66390a1b1" gracePeriod=30
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.605549 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vt692"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.606163 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vt692" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="registry-server" containerID="cri-o://f1a89ae4e659475ac93f445f047280ee354ece84ee4abccadc5e6b5a0ebb5121" gracePeriod=30
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.614739 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxrth"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.615090 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rxrth" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="registry-server" containerID="cri-o://fc37487d8a08e22fc213e62713c455c4fa35508916ab38602ec077a16fc45a59" gracePeriod=30
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.626349 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vsbt4"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.626657 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vsbt4" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerName="registry-server" containerID="cri-o://7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042" gracePeriod=30
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.648420 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-89l9h"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.649053 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" podUID="72d97894-0a82-427f-8376-bea96de36324" containerName="marketplace-operator" containerID="cri-o://037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3" gracePeriod=30
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.656087 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvzvc"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.656507 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mvzvc" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="registry-server" containerID="cri-o://d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878" gracePeriod=30
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.670426 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dhq2f"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.670751 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dhq2f" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="registry-server" containerID="cri-o://bb99d27f090ac99c2cc655d4b7265c9e633dd94340be09298adbff623f639c66" gracePeriod=30
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.685765 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2kmf2"]
Dec 02 16:47:24 crc kubenswrapper[4747]: E1202 16:47:24.686113 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" containerName="installer"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686133 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" containerName="installer"
Dec 02 16:47:24 crc kubenswrapper[4747]: E1202 16:47:24.686145 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="registry-server"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686154 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="registry-server"
Dec 02 16:47:24 crc kubenswrapper[4747]: E1202 16:47:24.686171 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="extract-content"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686178 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="extract-content"
Dec 02 16:47:24 crc kubenswrapper[4747]: E1202 16:47:24.686185 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686191 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 02 16:47:24 crc kubenswrapper[4747]: E1202 16:47:24.686202 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="extract-utilities"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686209 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="extract-utilities"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686339 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f977bba4-b1bf-4915-bb71-71a4dd413d9b" containerName="installer"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686353 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686366 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="36ead3e1-831c-424c-8f1e-4b4213621c9a" containerName="registry-server"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.686765 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.705121 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2kmf2"]
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.856103 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24gjx\" (UniqueName: \"kubernetes.io/projected/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-kube-api-access-24gjx\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.856158 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.856243 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.957165 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24gjx\" (UniqueName: \"kubernetes.io/projected/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-kube-api-access-24gjx\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.957249 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.957322 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.958864 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.968853 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:24 crc kubenswrapper[4747]: I1202 16:47:24.982983 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24gjx\" (UniqueName: \"kubernetes.io/projected/dd32d5f2-ea95-4e1f-91f5-3e245c961bd6-kube-api-access-24gjx\") pod \"marketplace-operator-79b997595-2kmf2\" (UID: \"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6\") " pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.007483 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.236892 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.246356 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.261440 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvzvc"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.337291 4747 generic.go:334] "Generic (PLEG): container finished" podID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerID="7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042" exitCode=0
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.337382 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vsbt4" event={"ID":"362f8e5d-8e91-435a-af79-5d318c2288e0","Type":"ContainerDied","Data":"7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.337428 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vsbt4"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.337468 4747 scope.go:117] "RemoveContainer" containerID="7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.337450 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vsbt4" event={"ID":"362f8e5d-8e91-435a-af79-5d318c2288e0","Type":"ContainerDied","Data":"988f1825e1d2327195a48011c0ff846724987f20c8d8479bf06d2f80ccfffa93"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.344419 4747 generic.go:334] "Generic (PLEG): container finished" podID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerID="bb99d27f090ac99c2cc655d4b7265c9e633dd94340be09298adbff623f639c66" exitCode=0
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.344918 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhq2f" event={"ID":"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553","Type":"ContainerDied","Data":"bb99d27f090ac99c2cc655d4b7265c9e633dd94340be09298adbff623f639c66"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.361728 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerID="f1a89ae4e659475ac93f445f047280ee354ece84ee4abccadc5e6b5a0ebb5121" exitCode=0
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.361889 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerDied","Data":"f1a89ae4e659475ac93f445f047280ee354ece84ee4abccadc5e6b5a0ebb5121"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.362049 4747 scope.go:117] "RemoveContainer" containerID="b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.366972 4747 generic.go:334] "Generic (PLEG): container finished" podID="6b230695-281f-405b-98c1-eb2e1470889e" containerID="55861e680b4fb7d38b22b39cd5542da2a9e35d4c991f41e0e9c6f4a66390a1b1" exitCode=0
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.367047 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl6rp" event={"ID":"6b230695-281f-405b-98c1-eb2e1470889e","Type":"ContainerDied","Data":"55861e680b4fb7d38b22b39cd5542da2a9e35d4c991f41e0e9c6f4a66390a1b1"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.367402 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-catalog-content\") pod \"2fad8dbb-9212-436a-bad9-7439f27afec6\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.367528 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-utilities\") pod \"362f8e5d-8e91-435a-af79-5d318c2288e0\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.367578 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-utilities\") pod \"2fad8dbb-9212-436a-bad9-7439f27afec6\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.367631 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwqdt\" (UniqueName: \"kubernetes.io/projected/72d97894-0a82-427f-8376-bea96de36324-kube-api-access-hwqdt\") pod \"72d97894-0a82-427f-8376-bea96de36324\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.367718 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d97894-0a82-427f-8376-bea96de36324-marketplace-trusted-ca\") pod \"72d97894-0a82-427f-8376-bea96de36324\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.367799 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8svh\" (UniqueName: \"kubernetes.io/projected/2fad8dbb-9212-436a-bad9-7439f27afec6-kube-api-access-x8svh\") pod \"2fad8dbb-9212-436a-bad9-7439f27afec6\" (UID: \"2fad8dbb-9212-436a-bad9-7439f27afec6\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.368664 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72d97894-0a82-427f-8376-bea96de36324-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "72d97894-0a82-427f-8376-bea96de36324" (UID: "72d97894-0a82-427f-8376-bea96de36324"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.368859 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-catalog-content\") pod \"362f8e5d-8e91-435a-af79-5d318c2288e0\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.368925 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/72d97894-0a82-427f-8376-bea96de36324-marketplace-operator-metrics\") pod \"72d97894-0a82-427f-8376-bea96de36324\" (UID: \"72d97894-0a82-427f-8376-bea96de36324\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.368972 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qt7j\" (UniqueName: \"kubernetes.io/projected/362f8e5d-8e91-435a-af79-5d318c2288e0-kube-api-access-7qt7j\") pod \"362f8e5d-8e91-435a-af79-5d318c2288e0\" (UID: \"362f8e5d-8e91-435a-af79-5d318c2288e0\") "
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.369604 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-utilities" (OuterVolumeSpecName: "utilities") pod "362f8e5d-8e91-435a-af79-5d318c2288e0" (UID: "362f8e5d-8e91-435a-af79-5d318c2288e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.369930 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72d97894-0a82-427f-8376-bea96de36324-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.374494 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-utilities" (OuterVolumeSpecName: "utilities") pod "2fad8dbb-9212-436a-bad9-7439f27afec6" (UID: "2fad8dbb-9212-436a-bad9-7439f27afec6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.379176 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fad8dbb-9212-436a-bad9-7439f27afec6-kube-api-access-x8svh" (OuterVolumeSpecName: "kube-api-access-x8svh") pod "2fad8dbb-9212-436a-bad9-7439f27afec6" (UID: "2fad8dbb-9212-436a-bad9-7439f27afec6"). InnerVolumeSpecName "kube-api-access-x8svh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.380610 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72d97894-0a82-427f-8376-bea96de36324-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "72d97894-0a82-427f-8376-bea96de36324" (UID: "72d97894-0a82-427f-8376-bea96de36324"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.381772 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72d97894-0a82-427f-8376-bea96de36324-kube-api-access-hwqdt" (OuterVolumeSpecName: "kube-api-access-hwqdt") pod "72d97894-0a82-427f-8376-bea96de36324" (UID: "72d97894-0a82-427f-8376-bea96de36324"). InnerVolumeSpecName "kube-api-access-hwqdt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.383609 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/362f8e5d-8e91-435a-af79-5d318c2288e0-kube-api-access-7qt7j" (OuterVolumeSpecName: "kube-api-access-7qt7j") pod "362f8e5d-8e91-435a-af79-5d318c2288e0" (UID: "362f8e5d-8e91-435a-af79-5d318c2288e0"). InnerVolumeSpecName "kube-api-access-7qt7j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.384216 4747 generic.go:334] "Generic (PLEG): container finished" podID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerID="d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878" exitCode=0
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.384310 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvzvc" event={"ID":"2fad8dbb-9212-436a-bad9-7439f27afec6","Type":"ContainerDied","Data":"d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.384350 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mvzvc" event={"ID":"2fad8dbb-9212-436a-bad9-7439f27afec6","Type":"ContainerDied","Data":"449caf2c9f710470b3f0ea3d14dbbc644f1323919255d893b274e169b9f336dd"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.384450 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mvzvc"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.390168 4747 generic.go:334] "Generic (PLEG): container finished" podID="72d97894-0a82-427f-8376-bea96de36324" containerID="037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3" exitCode=0
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.390269 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" event={"ID":"72d97894-0a82-427f-8376-bea96de36324","Type":"ContainerDied","Data":"037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.390303 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h" event={"ID":"72d97894-0a82-427f-8376-bea96de36324","Type":"ContainerDied","Data":"3fdc9f9ad6f926ba1a2f5c0d600413d5f11fd12dca7e410a252619c0807f3ab8"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.390400 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-89l9h"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.398610 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fad8dbb-9212-436a-bad9-7439f27afec6" (UID: "2fad8dbb-9212-436a-bad9-7439f27afec6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.398875 4747 scope.go:117] "RemoveContainer" containerID="df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.400764 4747 generic.go:334] "Generic (PLEG): container finished" podID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerID="fc37487d8a08e22fc213e62713c455c4fa35508916ab38602ec077a16fc45a59" exitCode=0
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.400847 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxrth" event={"ID":"066ad5e3-8cd4-4a73-895d-76671a0d6aa9","Type":"ContainerDied","Data":"fc37487d8a08e22fc213e62713c455c4fa35508916ab38602ec077a16fc45a59"}
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.435799 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-89l9h"]
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.440747 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-89l9h"]
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.443077 4747 scope.go:117] "RemoveContainer" containerID="7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042"
Dec 02 16:47:25 crc kubenswrapper[4747]: E1202 16:47:25.444203 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042\": container with ID starting with 7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042 not found: ID does not exist" containerID="7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.444297 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042"} err="failed to get container status \"7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042\": rpc error: code = NotFound desc = could not find container \"7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042\": container with ID starting with 7939ff0151ef0346743adb1664004371257ecf893ed4c45cdeea111337598042 not found: ID does not exist"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.444394 4747 scope.go:117] "RemoveContainer" containerID="b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78"
Dec 02 16:47:25 crc kubenswrapper[4747]: E1202 16:47:25.446084 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78\": container with ID starting with b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78 not found: ID does not exist" containerID="b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.446174 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78"} err="failed to get container status \"b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78\": rpc error: code = NotFound desc = could not find container \"b757b8915f9be10abd8ea5477ac15cec1cd1d4c99aff1708eaaca2b590120b78\": container with ID starting
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.446921 4747 scope.go:117] "RemoveContainer" containerID="df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894"
Dec 02 16:47:25 crc kubenswrapper[4747]: E1202 16:47:25.447756 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894\": container with ID starting with df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894 not found: ID does not exist" containerID="df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.447809 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894"} err="failed to get container status \"df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894\": rpc error: code = NotFound desc = could not find container \"df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894\": container with ID starting with df9afed660d2eda8ac107058d5f1df11b470d0f14891421ab01250c84158e894 not found: ID does not exist"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.447884 4747 scope.go:117] "RemoveContainer" containerID="d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472661 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8svh\" (UniqueName: \"kubernetes.io/projected/2fad8dbb-9212-436a-bad9-7439f27afec6-kube-api-access-x8svh\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472709 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/72d97894-0a82-427f-8376-bea96de36324-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472723 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qt7j\" (UniqueName: \"kubernetes.io/projected/362f8e5d-8e91-435a-af79-5d318c2288e0-kube-api-access-7qt7j\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472735 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472765 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472780 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fad8dbb-9212-436a-bad9-7439f27afec6-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472865 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwqdt\" (UniqueName: \"kubernetes.io/projected/72d97894-0a82-427f-8376-bea96de36324-kube-api-access-hwqdt\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.472890 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "362f8e5d-8e91-435a-af79-5d318c2288e0" (UID: "362f8e5d-8e91-435a-af79-5d318c2288e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.480233 4747 scope.go:117] "RemoveContainer" containerID="450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.485965 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2kmf2"]
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.501243 4747 scope.go:117] "RemoveContainer" containerID="4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.504865 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxrth"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.545540 4747 scope.go:117] "RemoveContainer" containerID="d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878"
Dec 02 16:47:25 crc kubenswrapper[4747]: E1202 16:47:25.546040 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878\": container with ID starting with d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878 not found: ID does not exist" containerID="d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.546077 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878"} err="failed to get container status \"d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878\": rpc error: code = NotFound desc = could not find container \"d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878\": container with ID starting with d06dc27d9dc508495aad4f3fe509b39572b2b51a1dfb927f20df6dee43f84878 not found: ID does not exist"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.546107 4747 scope.go:117] "RemoveContainer" containerID="450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a"
Dec 02 16:47:25 crc kubenswrapper[4747]: E1202 16:47:25.546458 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a\": container with ID starting with 450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a not found: ID does not exist" containerID="450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.546539 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a"} err="failed to get container status \"450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a\": rpc error: code = NotFound desc = could not find container \"450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a\": container with ID starting with 450d29df166dad4c7c29f4a155ce9e394121102dfb038dcb9c1883185bdd508a not found: ID does not exist"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.546606 4747 scope.go:117] "RemoveContainer" containerID="4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d"
Dec 02 16:47:25 crc kubenswrapper[4747]: E1202 16:47:25.547154 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d\": container with ID starting with 4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d not found: ID does not exist" containerID="4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.547188 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d"} err="failed to get container status \"4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d\": rpc error: code = NotFound desc = could not find container \"4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d\": container with ID starting with 4dec5878f72bba21d495145cd04c08cf7e688239e376e9de3b38354b486f6c4d not found: ID does not exist"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.547214 4747 scope.go:117] "RemoveContainer" containerID="037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.565392 4747 scope.go:117] "RemoveContainer" containerID="037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3"
Dec 02 16:47:25 crc kubenswrapper[4747]: E1202 16:47:25.567115 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3\": container with ID starting with 037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3 not found: ID does not exist" containerID="037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.567213 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3"} err="failed to get container status \"037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3\": rpc error: code = NotFound desc = could not find container \"037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3\": container with ID starting with 037b3ffa4ee1bd48b74e3f3e145796e4293bea5b6699330fcb42590862c6dcc3 not found: ID does not exist"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.573866 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/362f8e5d-8e91-435a-af79-5d318c2288e0-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.603176 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vt692"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.618448 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dhq2f"
Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.642177 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jl6rp"
Need to start a new one" pod="openshift-marketplace/certified-operators-jl6rp" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.675749 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rvbw\" (UniqueName: \"kubernetes.io/projected/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-kube-api-access-9rvbw\") pod \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.675842 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-utilities\") pod \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.675922 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-catalog-content\") pod \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\" (UID: \"066ad5e3-8cd4-4a73-895d-76671a0d6aa9\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.677499 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-utilities" (OuterVolumeSpecName: "utilities") pod "066ad5e3-8cd4-4a73-895d-76671a0d6aa9" (UID: "066ad5e3-8cd4-4a73-895d-76671a0d6aa9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.709281 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-kube-api-access-9rvbw" (OuterVolumeSpecName: "kube-api-access-9rvbw") pod "066ad5e3-8cd4-4a73-895d-76671a0d6aa9" (UID: "066ad5e3-8cd4-4a73-895d-76671a0d6aa9"). InnerVolumeSpecName "kube-api-access-9rvbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.777265 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-utilities\") pod \"6b230695-281f-405b-98c1-eb2e1470889e\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.777348 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-catalog-content\") pod \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.777376 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-catalog-content\") pod \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.778883 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "066ad5e3-8cd4-4a73-895d-76671a0d6aa9" (UID: "066ad5e3-8cd4-4a73-895d-76671a0d6aa9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.779020 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-utilities" (OuterVolumeSpecName: "utilities") pod "6b230695-281f-405b-98c1-eb2e1470889e" (UID: "6b230695-281f-405b-98c1-eb2e1470889e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.779613 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72d97894-0a82-427f-8376-bea96de36324" path="/var/lib/kubelet/pods/72d97894-0a82-427f-8376-bea96de36324/volumes" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.782329 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb9hs\" (UniqueName: \"kubernetes.io/projected/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-kube-api-access-rb9hs\") pod \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.782497 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqd6r\" (UniqueName: \"kubernetes.io/projected/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-kube-api-access-hqd6r\") pod \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.782663 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-catalog-content\") pod \"6b230695-281f-405b-98c1-eb2e1470889e\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.783296 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-utilities\") pod \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\" (UID: \"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.783443 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5c76\" (UniqueName: \"kubernetes.io/projected/6b230695-281f-405b-98c1-eb2e1470889e-kube-api-access-q5c76\") pod \"6b230695-281f-405b-98c1-eb2e1470889e\" (UID: \"6b230695-281f-405b-98c1-eb2e1470889e\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.783480 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-utilities\") pod \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\" (UID: \"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27\") " Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.785267 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rvbw\" (UniqueName: \"kubernetes.io/projected/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-kube-api-access-9rvbw\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.785290 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.785300 4747 reconciler_common.go:293] "Volume detached for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/066ad5e3-8cd4-4a73-895d-76671a0d6aa9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.785410 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.787006 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-kube-api-access-rb9hs" (OuterVolumeSpecName: "kube-api-access-rb9hs") pod "37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" (UID: "37641b5c-f1c3-490e-8b8c-8d7ee2ea4553"). InnerVolumeSpecName "kube-api-access-rb9hs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.787714 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b230695-281f-405b-98c1-eb2e1470889e-kube-api-access-q5c76" (OuterVolumeSpecName: "kube-api-access-q5c76") pod "6b230695-281f-405b-98c1-eb2e1470889e" (UID: "6b230695-281f-405b-98c1-eb2e1470889e"). InnerVolumeSpecName "kube-api-access-q5c76". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.787850 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-utilities" (OuterVolumeSpecName: "utilities") pod "b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" (UID: "b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.788012 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-kube-api-access-hqd6r" (OuterVolumeSpecName: "kube-api-access-hqd6r") pod "b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" (UID: "b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27"). InnerVolumeSpecName "kube-api-access-hqd6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.787547 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-utilities" (OuterVolumeSpecName: "utilities") pod "37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" (UID: "37641b5c-f1c3-490e-8b8c-8d7ee2ea4553"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.822941 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vsbt4"] Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.823418 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vsbt4"] Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.823437 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvzvc"] Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.823447 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mvzvc"] Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.855691 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b230695-281f-405b-98c1-eb2e1470889e" (UID: "6b230695-281f-405b-98c1-eb2e1470889e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.861030 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" (UID: "b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.886462 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.886509 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5c76\" (UniqueName: \"kubernetes.io/projected/6b230695-281f-405b-98c1-eb2e1470889e-kube-api-access-q5c76\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.886523 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.886535 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.886547 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb9hs\" (UniqueName: \"kubernetes.io/projected/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-kube-api-access-rb9hs\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.886559 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqd6r\" (UniqueName: \"kubernetes.io/projected/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27-kube-api-access-hqd6r\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.886571 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b230695-281f-405b-98c1-eb2e1470889e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:25 
crc kubenswrapper[4747]: I1202 16:47:25.924685 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" (UID: "37641b5c-f1c3-490e-8b8c-8d7ee2ea4553"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:47:25 crc kubenswrapper[4747]: I1202 16:47:25.987279 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.407707 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jl6rp" event={"ID":"6b230695-281f-405b-98c1-eb2e1470889e","Type":"ContainerDied","Data":"3cf48737995dc3169e7886dae4a5e1b49c5966864caf02a60106d781f760193e"} Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.407778 4747 scope.go:117] "RemoveContainer" containerID="55861e680b4fb7d38b22b39cd5542da2a9e35d4c991f41e0e9c6f4a66390a1b1" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.407791 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jl6rp" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.412484 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rxrth" event={"ID":"066ad5e3-8cd4-4a73-895d-76671a0d6aa9","Type":"ContainerDied","Data":"3a5e6fa8b64c7f2a5e1877792dd94f0c62fb0b6c962af503649a7a9e44d45dae"} Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.412560 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rxrth" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.421973 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2" event={"ID":"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6","Type":"ContainerStarted","Data":"d2aab92976fb288b75dfa9a23faebc401c7d47ddeff38544f94c1a018951badd"} Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.422044 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2" event={"ID":"dd32d5f2-ea95-4e1f-91f5-3e245c961bd6","Type":"ContainerStarted","Data":"47591fb8dbaa76bc0bb2b61431861ae7aac47cdcc89f869dc9697abbfcdc89c6"} Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.422214 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.426048 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.431435 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dhq2f" event={"ID":"37641b5c-f1c3-490e-8b8c-8d7ee2ea4553","Type":"ContainerDied","Data":"8723fa04857b53daaf8b7a280ead579c0435ead1e9d6fa496baf91ce8b0ad9f0"} Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.431601 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dhq2f" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.438553 4747 scope.go:117] "RemoveContainer" containerID="b15752ba69a141a903f0e8c84c16fe6e1350a212ebfcb37060a935a6f98709f1" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.438790 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rxrth"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.443139 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rxrth"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.451100 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-2kmf2" podStartSLOduration=2.4510810579999998 podStartE2EDuration="2.451081058s" podCreationTimestamp="2025-12-02 16:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:47:26.449512699 +0000 UTC m=+276.976401448" watchObservedRunningTime="2025-12-02 16:47:26.451081058 +0000 UTC m=+276.977969797" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.471528 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt692" event={"ID":"b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27","Type":"ContainerDied","Data":"cb99dd5de6c2b502452a8e777f045c09d705abc1522ecde28ec463e769a8d5e4"} Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.471660 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vt692" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.476916 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jl6rp"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.481384 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jl6rp"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.488491 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dhq2f"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.489751 4747 scope.go:117] "RemoveContainer" containerID="9be29bcaf21b8841aef7d58968b0b6bbc6c2687a51e0c8933b96234af3c7807b" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.493334 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dhq2f"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.510823 4747 scope.go:117] "RemoveContainer" containerID="fc37487d8a08e22fc213e62713c455c4fa35508916ab38602ec077a16fc45a59" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.534446 4747 scope.go:117] "RemoveContainer" containerID="a4ca2f81180147b22fd8bb4d7190e2e2cdacaa72fb5afb97e2928d6cbf452c03" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.537257 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vt692"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.544842 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vt692"] Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.561615 4747 scope.go:117] "RemoveContainer" containerID="8c5fd170f1381392c3f753f7cf02c3689320dc3755530a301a07b9bdde320573" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.575592 4747 scope.go:117] 
"RemoveContainer" containerID="bb99d27f090ac99c2cc655d4b7265c9e633dd94340be09298adbff623f639c66" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.592526 4747 scope.go:117] "RemoveContainer" containerID="db9ec764f05263f842db59c01805a73d0ab73d6047c8d0a3303d548359af51e0" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.609779 4747 scope.go:117] "RemoveContainer" containerID="67cce7ffe76d2c557eef7161c48623ad61a57275334c9657d245107aaf035cf8" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.624388 4747 scope.go:117] "RemoveContainer" containerID="f1a89ae4e659475ac93f445f047280ee354ece84ee4abccadc5e6b5a0ebb5121" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.643256 4747 scope.go:117] "RemoveContainer" containerID="1b18dddcc9ac068b154d31763d5ebb4d774738a4502a043f90f8f7a788811522" Dec 02 16:47:26 crc kubenswrapper[4747]: I1202 16:47:26.663942 4747 scope.go:117] "RemoveContainer" containerID="ff48e4d332b88d5cd2e25172f2e3ab18fb858d634d0df71318db690cf87881ed" Dec 02 16:47:27 crc kubenswrapper[4747]: I1202 16:47:27.772072 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" path="/var/lib/kubelet/pods/066ad5e3-8cd4-4a73-895d-76671a0d6aa9/volumes" Dec 02 16:47:27 crc kubenswrapper[4747]: I1202 16:47:27.773406 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" path="/var/lib/kubelet/pods/2fad8dbb-9212-436a-bad9-7439f27afec6/volumes" Dec 02 16:47:27 crc kubenswrapper[4747]: I1202 16:47:27.774041 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" path="/var/lib/kubelet/pods/362f8e5d-8e91-435a-af79-5d318c2288e0/volumes" Dec 02 16:47:27 crc kubenswrapper[4747]: I1202 16:47:27.775128 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" path="/var/lib/kubelet/pods/37641b5c-f1c3-490e-8b8c-8d7ee2ea4553/volumes" Dec 02 16:47:27 crc kubenswrapper[4747]: I1202 16:47:27.775819 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b230695-281f-405b-98c1-eb2e1470889e" path="/var/lib/kubelet/pods/6b230695-281f-405b-98c1-eb2e1470889e/volumes" Dec 02 16:47:27 crc kubenswrapper[4747]: I1202 16:47:27.776890 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" path="/var/lib/kubelet/pods/b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27/volumes" Dec 02 16:47:30 crc kubenswrapper[4747]: I1202 16:47:30.570493 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 16:47:33 crc kubenswrapper[4747]: I1202 16:47:33.721233 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ddmtz"] Dec 02 16:47:33 crc kubenswrapper[4747]: I1202 16:47:33.722057 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" podUID="8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" containerName="controller-manager" containerID="cri-o://3099eb42bb2098850abcd3885f7ebebee14f5f17c91dc6a146cec7fb34810032" gracePeriod=30 Dec 02 16:47:33 crc kubenswrapper[4747]: I1202 16:47:33.820533 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7"] Dec 02 16:47:33 crc kubenswrapper[4747]: I1202 16:47:33.821139 4747 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" podUID="ea23bfbb-080b-479c-8971-594045cdc2a4" containerName="route-controller-manager" containerID="cri-o://59fcd9d532bdebdfba5c40945814218e3549ebaead87e49be862a7f95f2abc4a" gracePeriod=30 Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.524239 4747 generic.go:334] "Generic (PLEG): container finished" podID="ea23bfbb-080b-479c-8971-594045cdc2a4" containerID="59fcd9d532bdebdfba5c40945814218e3549ebaead87e49be862a7f95f2abc4a" exitCode=0 Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.524334 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" event={"ID":"ea23bfbb-080b-479c-8971-594045cdc2a4","Type":"ContainerDied","Data":"59fcd9d532bdebdfba5c40945814218e3549ebaead87e49be862a7f95f2abc4a"} Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.524948 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" event={"ID":"ea23bfbb-080b-479c-8971-594045cdc2a4","Type":"ContainerDied","Data":"3375b4576e8bbcc9d2267ca0bf6010c60441f96040c97347dd2fb37e06007bc5"} Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.524977 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3375b4576e8bbcc9d2267ca0bf6010c60441f96040c97347dd2fb37e06007bc5" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.528016 4747 generic.go:334] "Generic (PLEG): container finished" podID="8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" containerID="3099eb42bb2098850abcd3885f7ebebee14f5f17c91dc6a146cec7fb34810032" exitCode=0 Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.528317 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" event={"ID":"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67","Type":"ContainerDied","Data":"3099eb42bb2098850abcd3885f7ebebee14f5f17c91dc6a146cec7fb34810032"} Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.528771 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" event={"ID":"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67","Type":"ContainerDied","Data":"93581492673f38b15b2923d7c7c67cc42d5c62fd68e15e484435c2e98e18a0ed"} Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.529096 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93581492673f38b15b2923d7c7c67cc42d5c62fd68e15e484435c2e98e18a0ed" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.593267 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.603604 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.708867 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzg2m\" (UniqueName: \"kubernetes.io/projected/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-kube-api-access-lzg2m\") pod \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709424 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-proxy-ca-bundles\") pod \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709458 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbvn8\" (UniqueName: \"kubernetes.io/projected/ea23bfbb-080b-479c-8971-594045cdc2a4-kube-api-access-cbvn8\") pod \"ea23bfbb-080b-479c-8971-594045cdc2a4\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709500 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-config\") pod \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709539 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-config\") pod \"ea23bfbb-080b-479c-8971-594045cdc2a4\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709568 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea23bfbb-080b-479c-8971-594045cdc2a4-serving-cert\") pod \"ea23bfbb-080b-479c-8971-594045cdc2a4\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709599 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-serving-cert\") pod \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709627 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-client-ca\") pod \"ea23bfbb-080b-479c-8971-594045cdc2a4\" (UID: \"ea23bfbb-080b-479c-8971-594045cdc2a4\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.709662 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-client-ca\") pod \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\" (UID: \"8bfc4a5a-ce30-4a01-b960-98eb74cbfe67\") " Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.711178 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-client-ca" (OuterVolumeSpecName: "client-ca") pod "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" 
(UID: "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.714751 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" (UID: "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.717753 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-client-ca" (OuterVolumeSpecName: "client-ca") pod "ea23bfbb-080b-479c-8971-594045cdc2a4" (UID: "ea23bfbb-080b-479c-8971-594045cdc2a4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.718157 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-config" (OuterVolumeSpecName: "config") pod "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" (UID: "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.719789 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-config" (OuterVolumeSpecName: "config") pod "ea23bfbb-080b-479c-8971-594045cdc2a4" (UID: "ea23bfbb-080b-479c-8971-594045cdc2a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.722060 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea23bfbb-080b-479c-8971-594045cdc2a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ea23bfbb-080b-479c-8971-594045cdc2a4" (UID: "ea23bfbb-080b-479c-8971-594045cdc2a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.722105 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-kube-api-access-lzg2m" (OuterVolumeSpecName: "kube-api-access-lzg2m") pod "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" (UID: "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67"). InnerVolumeSpecName "kube-api-access-lzg2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.722054 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea23bfbb-080b-479c-8971-594045cdc2a4-kube-api-access-cbvn8" (OuterVolumeSpecName: "kube-api-access-cbvn8") pod "ea23bfbb-080b-479c-8971-594045cdc2a4" (UID: "ea23bfbb-080b-479c-8971-594045cdc2a4"). InnerVolumeSpecName "kube-api-access-cbvn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.722221 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" (UID: "8bfc4a5a-ce30-4a01-b960-98eb74cbfe67"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.811563 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzg2m\" (UniqueName: \"kubernetes.io/projected/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-kube-api-access-lzg2m\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.811934 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.812016 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbvn8\" (UniqueName: \"kubernetes.io/projected/ea23bfbb-080b-479c-8971-594045cdc2a4-kube-api-access-cbvn8\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.812133 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.812199 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.812263 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ea23bfbb-080b-479c-8971-594045cdc2a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.812329 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.812386 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ea23bfbb-080b-479c-8971-594045cdc2a4-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:34 crc kubenswrapper[4747]: I1202 16:47:34.812444 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.533687 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.533740 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ddmtz" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.553679 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-55fdb685f9-2m6jc"] Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575310 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="extract-utilities" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575378 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="extract-utilities" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575419 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea23bfbb-080b-479c-8971-594045cdc2a4" containerName="route-controller-manager" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575430 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea23bfbb-080b-479c-8971-594045cdc2a4" containerName="route-controller-manager" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575443 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="registry-server" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575459 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="registry-server" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575469 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="registry-server" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575479 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="registry-server" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575538 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerName="registry-server" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575553 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerName="registry-server" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575566 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" containerName="controller-manager" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575580 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" containerName="controller-manager" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575598 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="extract-content" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575608 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="extract-content" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575626 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="extract-content" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575636 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="extract-content" Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575659 4747 
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575668 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575680 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575689 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575705 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575715 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575736 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575746 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575766 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575776 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575797 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575805 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575824 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575835 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575849 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575864 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="extract-content"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575887 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575898 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.575973 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.575983 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="extract-utilities"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.576001 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.576010 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.576031 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.576591 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: E1202 16:47:35.576633 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72d97894-0a82-427f-8376-bea96de36324" containerName="marketplace-operator"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.576645 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="72d97894-0a82-427f-8376-bea96de36324" containerName="marketplace-operator"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577059 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="37641b5c-f1c3-490e-8b8c-8d7ee2ea4553" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577108 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="362f8e5d-8e91-435a-af79-5d318c2288e0" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577121 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="72d97894-0a82-427f-8376-bea96de36324" containerName="marketplace-operator"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577139 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fad8dbb-9212-436a-bad9-7439f27afec6" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577159 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea23bfbb-080b-479c-8971-594045cdc2a4" containerName="route-controller-manager"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577178 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" containerName="controller-manager"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577195 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1c1a9cf-ead8-4ff7-8ba8-8142b96e5e27" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577214 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="066ad5e3-8cd4-4a73-895d-76671a0d6aa9" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.577227 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b230695-281f-405b-98c1-eb2e1470889e" containerName="registry-server"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.578008 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.583266 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.583432 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.584735 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.584972 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.584973 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.585093 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd"]
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.585542 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.586679 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.592063 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.598288 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.598485 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.598488 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.598671 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.599825 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.600174 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-55fdb685f9-2m6jc"]
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.602986 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.605394 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd"]
Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.610889 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ddmtz"]
Dec 02 16:47:35
crc kubenswrapper[4747]: I1202 16:47:35.615785 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ddmtz"] Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.658861 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7"] Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.667403 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8n5g7"] Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.724723 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-client-ca\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.724816 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf888\" (UniqueName: \"kubernetes.io/projected/972f09f4-09ea-43b2-9255-9c80819873a9-kube-api-access-xf888\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.724850 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-config\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.724892 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-config\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.724936 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-proxy-ca-bundles\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.725166 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-client-ca\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.725244 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrqvm\" (UniqueName: \"kubernetes.io/projected/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-kube-api-access-zrqvm\") pod 
\"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.725322 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-serving-cert\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.725377 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/972f09f4-09ea-43b2-9255-9c80819873a9-serving-cert\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.769301 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bfc4a5a-ce30-4a01-b960-98eb74cbfe67" path="/var/lib/kubelet/pods/8bfc4a5a-ce30-4a01-b960-98eb74cbfe67/volumes" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.770299 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea23bfbb-080b-479c-8971-594045cdc2a4" path="/var/lib/kubelet/pods/ea23bfbb-080b-479c-8971-594045cdc2a4/volumes" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826328 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrqvm\" (UniqueName: \"kubernetes.io/projected/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-kube-api-access-zrqvm\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826694 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-serving-cert\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826724 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/972f09f4-09ea-43b2-9255-9c80819873a9-serving-cert\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826759 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-client-ca\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826785 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf888\" (UniqueName: \"kubernetes.io/projected/972f09f4-09ea-43b2-9255-9c80819873a9-kube-api-access-xf888\") pod 
\"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826805 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-config\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826837 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-config\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826854 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-proxy-ca-bundles\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.826884 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-client-ca\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.827951 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-client-ca\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.828376 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-proxy-ca-bundles\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.828821 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-config\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.829094 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-client-ca\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 
16:47:35.829386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-config\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.836068 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/972f09f4-09ea-43b2-9255-9c80819873a9-serving-cert\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.839996 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-serving-cert\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.861696 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrqvm\" (UniqueName: \"kubernetes.io/projected/6b1664b5-ee6e-4766-a8c9-f28dff7616e9-kube-api-access-zrqvm\") pod \"controller-manager-55fdb685f9-2m6jc\" (UID: \"6b1664b5-ee6e-4766-a8c9-f28dff7616e9\") " pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.863220 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf888\" (UniqueName: \"kubernetes.io/projected/972f09f4-09ea-43b2-9255-9c80819873a9-kube-api-access-xf888\") pod \"route-controller-manager-59b55d474f-sg4fd\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.906895 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:35 crc kubenswrapper[4747]: I1202 16:47:35.915466 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.134548 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-55fdb685f9-2m6jc"] Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.309628 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd"] Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.403858 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd"] Dec 02 16:47:36 crc kubenswrapper[4747]: W1202 16:47:36.409832 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod972f09f4_09ea_43b2_9255_9c80819873a9.slice/crio-e1ac1d020c6eaa2cefb5edfaeefb6e6545e06b818549c6a0cc94839400690442 WatchSource:0}: Error finding container e1ac1d020c6eaa2cefb5edfaeefb6e6545e06b818549c6a0cc94839400690442: Status 404 returned error can't find the container with id e1ac1d020c6eaa2cefb5edfaeefb6e6545e06b818549c6a0cc94839400690442 Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.544713 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" event={"ID":"6b1664b5-ee6e-4766-a8c9-f28dff7616e9","Type":"ContainerStarted","Data":"b979544ade450b98d58ae2cd75ec09deba25edf39bc07043d46369bfd655813b"} Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.544761 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" event={"ID":"6b1664b5-ee6e-4766-a8c9-f28dff7616e9","Type":"ContainerStarted","Data":"44d3480d3a7c69f1b20e10ef28acac73262907ea3858356c04a07882eab5a6f4"} Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.546890 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.548733 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" event={"ID":"972f09f4-09ea-43b2-9255-9c80819873a9","Type":"ContainerStarted","Data":"e1ac1d020c6eaa2cefb5edfaeefb6e6545e06b818549c6a0cc94839400690442"} Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.554371 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" Dec 02 16:47:36 crc kubenswrapper[4747]: I1202 16:47:36.618243 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-55fdb685f9-2m6jc" podStartSLOduration=3.618220428 podStartE2EDuration="3.618220428s" podCreationTimestamp="2025-12-02 16:47:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:47:36.586510571 +0000 UTC m=+287.113399330" watchObservedRunningTime="2025-12-02 16:47:36.618220428 +0000 UTC m=+287.145109177" Dec 02 16:47:37 crc kubenswrapper[4747]: I1202 16:47:37.555442 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" 
event={"ID":"972f09f4-09ea-43b2-9255-9c80819873a9","Type":"ContainerStarted","Data":"78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b"} Dec 02 16:47:37 crc kubenswrapper[4747]: I1202 16:47:37.555585 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" podUID="972f09f4-09ea-43b2-9255-9c80819873a9" containerName="route-controller-manager" containerID="cri-o://78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b" gracePeriod=30 Dec 02 16:47:37 crc kubenswrapper[4747]: I1202 16:47:37.555855 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:37 crc kubenswrapper[4747]: I1202 16:47:37.564685 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:37 crc kubenswrapper[4747]: I1202 16:47:37.576991 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" podStartSLOduration=4.5769650120000005 podStartE2EDuration="4.576965012s" podCreationTimestamp="2025-12-02 16:47:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:47:37.576170157 +0000 UTC m=+288.103058916" watchObservedRunningTime="2025-12-02 16:47:37.576965012 +0000 UTC m=+288.103853771" Dec 02 16:47:37 crc kubenswrapper[4747]: I1202 16:47:37.976590 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.024721 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl"] Dec 02 16:47:38 crc kubenswrapper[4747]: E1202 16:47:38.025332 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="972f09f4-09ea-43b2-9255-9c80819873a9" containerName="route-controller-manager" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.025356 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="972f09f4-09ea-43b2-9255-9c80819873a9" containerName="route-controller-manager" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.025540 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="972f09f4-09ea-43b2-9255-9c80819873a9" containerName="route-controller-manager" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.027482 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.038373 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl"] Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.058305 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf888\" (UniqueName: \"kubernetes.io/projected/972f09f4-09ea-43b2-9255-9c80819873a9-kube-api-access-xf888\") pod \"972f09f4-09ea-43b2-9255-9c80819873a9\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.058445 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-config\") pod \"972f09f4-09ea-43b2-9255-9c80819873a9\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.058592 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-client-ca\") pod \"972f09f4-09ea-43b2-9255-9c80819873a9\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.058747 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/972f09f4-09ea-43b2-9255-9c80819873a9-serving-cert\") pod \"972f09f4-09ea-43b2-9255-9c80819873a9\" (UID: \"972f09f4-09ea-43b2-9255-9c80819873a9\") " Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.061365 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-client-ca" (OuterVolumeSpecName: "client-ca") pod "972f09f4-09ea-43b2-9255-9c80819873a9" (UID: "972f09f4-09ea-43b2-9255-9c80819873a9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.062108 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-config" (OuterVolumeSpecName: "config") pod "972f09f4-09ea-43b2-9255-9c80819873a9" (UID: "972f09f4-09ea-43b2-9255-9c80819873a9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.069300 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972f09f4-09ea-43b2-9255-9c80819873a9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "972f09f4-09ea-43b2-9255-9c80819873a9" (UID: "972f09f4-09ea-43b2-9255-9c80819873a9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.069501 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/972f09f4-09ea-43b2-9255-9c80819873a9-kube-api-access-xf888" (OuterVolumeSpecName: "kube-api-access-xf888") pod "972f09f4-09ea-43b2-9255-9c80819873a9" (UID: "972f09f4-09ea-43b2-9255-9c80819873a9"). InnerVolumeSpecName "kube-api-access-xf888". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.159939 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrdh5\" (UniqueName: \"kubernetes.io/projected/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-kube-api-access-lrdh5\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.160009 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-config\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.160051 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-client-ca\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.160090 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-serving-cert\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.160153 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/972f09f4-09ea-43b2-9255-9c80819873a9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.160173 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf888\" (UniqueName: \"kubernetes.io/projected/972f09f4-09ea-43b2-9255-9c80819873a9-kube-api-access-xf888\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.160183 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.160192 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/972f09f4-09ea-43b2-9255-9c80819873a9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.262309 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-serving-cert\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.262843 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrdh5\" (UniqueName: 
\"kubernetes.io/projected/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-kube-api-access-lrdh5\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.262969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-config\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.263091 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-client-ca\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.264597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-client-ca\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.264825 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-config\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.266856 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-serving-cert\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.282694 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrdh5\" (UniqueName: \"kubernetes.io/projected/b9cdaa9e-5f69-49e6-9807-0c123dc9c131-kube-api-access-lrdh5\") pod \"route-controller-manager-77568bc8fc-vktzl\" (UID: \"b9cdaa9e-5f69-49e6-9807-0c123dc9c131\") " pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.356311 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.566997 4747 generic.go:334] "Generic (PLEG): container finished" podID="972f09f4-09ea-43b2-9255-9c80819873a9" containerID="78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b" exitCode=0 Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.567830 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.569481 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" event={"ID":"972f09f4-09ea-43b2-9255-9c80819873a9","Type":"ContainerDied","Data":"78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b"} Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.569535 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd" event={"ID":"972f09f4-09ea-43b2-9255-9c80819873a9","Type":"ContainerDied","Data":"e1ac1d020c6eaa2cefb5edfaeefb6e6545e06b818549c6a0cc94839400690442"} Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.569561 4747 scope.go:117] "RemoveContainer" containerID="78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.577723 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl"] Dec 02 16:47:38 crc kubenswrapper[4747]: W1202 16:47:38.579138 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9cdaa9e_5f69_49e6_9807_0c123dc9c131.slice/crio-0edb44e140183dd6b374255cf81eb477a00814e0f2b1c3252dc6e8ebac6a2942 WatchSource:0}: Error finding container 0edb44e140183dd6b374255cf81eb477a00814e0f2b1c3252dc6e8ebac6a2942: Status 404 returned error can't find the container with id 0edb44e140183dd6b374255cf81eb477a00814e0f2b1c3252dc6e8ebac6a2942 Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.595381 4747 scope.go:117] "RemoveContainer" containerID="78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b" Dec 02 16:47:38 crc kubenswrapper[4747]: E1202 16:47:38.596027 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b\": container with ID starting with 78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b not found: ID does not exist" containerID="78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.596079 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b"} err="failed to get container status \"78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b\": rpc error: code = NotFound desc = could not find container \"78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b\": container with ID starting with 78fa0743d0ba9c9ea0e916faeddd92dbcc978646d4c3ad3406c9360c901da74b not found: ID does not exist" Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.605055 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd"] Dec 02 16:47:38 crc kubenswrapper[4747]: I1202 16:47:38.608629 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-59b55d474f-sg4fd"] Dec 02 16:47:39 crc kubenswrapper[4747]: I1202 16:47:39.573669 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" event={"ID":"b9cdaa9e-5f69-49e6-9807-0c123dc9c131","Type":"ContainerStarted","Data":"0c9f9503584f4ccdd22dd6190d1cbaf4b6f1bfb4591ba84725d75cba6cc1a4bf"} Dec 02 16:47:39 crc kubenswrapper[4747]: I1202 16:47:39.574065 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" event={"ID":"b9cdaa9e-5f69-49e6-9807-0c123dc9c131","Type":"ContainerStarted","Data":"0edb44e140183dd6b374255cf81eb477a00814e0f2b1c3252dc6e8ebac6a2942"} Dec 02 16:47:39 crc kubenswrapper[4747]: I1202 16:47:39.574082 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:39 crc kubenswrapper[4747]: I1202 16:47:39.580033 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" Dec 02 16:47:39 crc kubenswrapper[4747]: I1202 16:47:39.595398 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-77568bc8fc-vktzl" podStartSLOduration=3.595374823 podStartE2EDuration="3.595374823s" podCreationTimestamp="2025-12-02 16:47:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:47:39.594666201 +0000 UTC m=+290.121554950" watchObservedRunningTime="2025-12-02 16:47:39.595374823 +0000 UTC m=+290.122263572" Dec 02 16:47:39 crc kubenswrapper[4747]: I1202 16:47:39.771480 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="972f09f4-09ea-43b2-9255-9c80819873a9" path="/var/lib/kubelet/pods/972f09f4-09ea-43b2-9255-9c80819873a9/volumes" Dec 02 16:47:42 crc kubenswrapper[4747]: I1202 16:47:42.376812 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 16:47:49 crc kubenswrapper[4747]: I1202 16:47:49.625243 4747 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 02 16:47:53 crc kubenswrapper[4747]: I1202 16:47:53.922982 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 16:48:26 crc kubenswrapper[4747]: I1202 16:48:26.107735 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cd7fp"] Dec 02 16:48:26 crc kubenswrapper[4747]: I1202 16:48:26.978527 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qk2zr"] Dec 02 16:48:26 crc kubenswrapper[4747]: I1202 16:48:26.980028 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.006411 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qk2zr"] Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.086439 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-bound-sa-token\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.086885 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.087064 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1a8a73f0-5162-4348-b79f-fe418f14bbfe-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.087179 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-registry-tls\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.087323 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1a8a73f0-5162-4348-b79f-fe418f14bbfe-registry-certificates\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.087424 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a8a73f0-5162-4348-b79f-fe418f14bbfe-trusted-ca\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.087587 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1a8a73f0-5162-4348-b79f-fe418f14bbfe-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.087694 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfqgm\" (UniqueName: 
\"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-kube-api-access-dfqgm\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.120347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.189118 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1a8a73f0-5162-4348-b79f-fe418f14bbfe-registry-certificates\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.189165 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a8a73f0-5162-4348-b79f-fe418f14bbfe-trusted-ca\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.189207 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1a8a73f0-5162-4348-b79f-fe418f14bbfe-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.189229 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfqgm\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-kube-api-access-dfqgm\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.189275 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-bound-sa-token\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.189301 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1a8a73f0-5162-4348-b79f-fe418f14bbfe-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.189317 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-registry-tls\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.190441 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1a8a73f0-5162-4348-b79f-fe418f14bbfe-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.191313 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a8a73f0-5162-4348-b79f-fe418f14bbfe-trusted-ca\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.191481 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1a8a73f0-5162-4348-b79f-fe418f14bbfe-registry-certificates\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.196813 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1a8a73f0-5162-4348-b79f-fe418f14bbfe-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.196827 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-registry-tls\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.206310 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-bound-sa-token\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.208290 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfqgm\" (UniqueName: \"kubernetes.io/projected/1a8a73f0-5162-4348-b79f-fe418f14bbfe-kube-api-access-dfqgm\") pod \"image-registry-66df7c8f76-qk2zr\" (UID: \"1a8a73f0-5162-4348-b79f-fe418f14bbfe\") " pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.303274 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.725025 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qk2zr"] Dec 02 16:48:27 crc kubenswrapper[4747]: I1202 16:48:27.841602 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" event={"ID":"1a8a73f0-5162-4348-b79f-fe418f14bbfe","Type":"ContainerStarted","Data":"bb2565b27fecaaa4d93375ca76d91b6ef63f0e33000886988b0d083ff4f25e4e"} Dec 02 16:48:28 crc kubenswrapper[4747]: I1202 16:48:28.849051 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" event={"ID":"1a8a73f0-5162-4348-b79f-fe418f14bbfe","Type":"ContainerStarted","Data":"96ff7e232c463b6908707a807c5a0b6c87cfea84f454f758b6f6c21adccdefc6"} Dec 02 16:48:28 crc kubenswrapper[4747]: I1202 16:48:28.849355 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.701078 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr" podStartSLOduration=12.701049078 podStartE2EDuration="12.701049078s" podCreationTimestamp="2025-12-02 16:48:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:48:28.87137472 +0000 UTC m=+339.398263469" watchObservedRunningTime="2025-12-02 16:48:38.701049078 +0000 UTC m=+349.227937827" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.703144 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xlkb2"] Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.704470 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.710808 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.716979 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xlkb2"] Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.849612 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-utilities\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.849722 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq7z2\" (UniqueName: \"kubernetes.io/projected/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-kube-api-access-dq7z2\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.849789 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-catalog-content\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.950429 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq7z2\" (UniqueName: \"kubernetes.io/projected/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-kube-api-access-dq7z2\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.950496 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-catalog-content\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.950563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-utilities\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.951179 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-utilities\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.951326 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-catalog-content\") pod \"community-operators-xlkb2\" (UID: 
\"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:38 crc kubenswrapper[4747]: I1202 16:48:38.974274 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq7z2\" (UniqueName: \"kubernetes.io/projected/0ba8abe4-42dc-4c6d-93fa-4ed196ff9105-kube-api-access-dq7z2\") pod \"community-operators-xlkb2\" (UID: \"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105\") " pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.032003 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.270876 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xlkb2"] Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.300839 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wfvnl"] Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.302112 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.305555 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.309946 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wfvnl"] Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.457273 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/233bbc8c-b118-4875-bc52-e626d7410c69-utilities\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.457370 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/233bbc8c-b118-4875-bc52-e626d7410c69-catalog-content\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.457433 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqkrl\" (UniqueName: \"kubernetes.io/projected/233bbc8c-b118-4875-bc52-e626d7410c69-kube-api-access-hqkrl\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.558641 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqkrl\" (UniqueName: \"kubernetes.io/projected/233bbc8c-b118-4875-bc52-e626d7410c69-kube-api-access-hqkrl\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.559180 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/233bbc8c-b118-4875-bc52-e626d7410c69-utilities\") pod \"certified-operators-wfvnl\" (UID: 
\"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.559223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/233bbc8c-b118-4875-bc52-e626d7410c69-catalog-content\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.559825 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/233bbc8c-b118-4875-bc52-e626d7410c69-catalog-content\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.560030 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/233bbc8c-b118-4875-bc52-e626d7410c69-utilities\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.580513 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqkrl\" (UniqueName: \"kubernetes.io/projected/233bbc8c-b118-4875-bc52-e626d7410c69-kube-api-access-hqkrl\") pod \"certified-operators-wfvnl\" (UID: \"233bbc8c-b118-4875-bc52-e626d7410c69\") " pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.715229 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wfvnl" Dec 02 16:48:39 crc kubenswrapper[4747]: I1202 16:48:39.906741 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlkb2" event={"ID":"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105","Type":"ContainerStarted","Data":"d92b7e9ddeac2719d16fd40732af0eeb822445543e266f9045c099ad38d3ca3b"} Dec 02 16:48:40 crc kubenswrapper[4747]: I1202 16:48:40.099308 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wfvnl"] Dec 02 16:48:40 crc kubenswrapper[4747]: W1202 16:48:40.107646 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod233bbc8c_b118_4875_bc52_e626d7410c69.slice/crio-c41f0e142003281d16d19e8b69ab26144bf33bd1c979942b614959fdb7001b37 WatchSource:0}: Error finding container c41f0e142003281d16d19e8b69ab26144bf33bd1c979942b614959fdb7001b37: Status 404 returned error can't find the container with id c41f0e142003281d16d19e8b69ab26144bf33bd1c979942b614959fdb7001b37 Dec 02 16:48:40 crc kubenswrapper[4747]: I1202 16:48:40.915443 4747 generic.go:334] "Generic (PLEG): container finished" podID="0ba8abe4-42dc-4c6d-93fa-4ed196ff9105" containerID="193e6b746d74bed239f330ddf9fb27dd1a6719c95920c10842892a7ae872e434" exitCode=0 Dec 02 16:48:40 crc kubenswrapper[4747]: I1202 16:48:40.915529 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlkb2" event={"ID":"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105","Type":"ContainerDied","Data":"193e6b746d74bed239f330ddf9fb27dd1a6719c95920c10842892a7ae872e434"} Dec 02 16:48:40 crc kubenswrapper[4747]: I1202 16:48:40.920606 4747 generic.go:334] 
"Generic (PLEG): container finished" podID="233bbc8c-b118-4875-bc52-e626d7410c69" containerID="3bdd9a3a444f9cd761faf0d1c2c1f65e1b2916fe07a4b419de2c9003f7a7da92" exitCode=0 Dec 02 16:48:40 crc kubenswrapper[4747]: I1202 16:48:40.920671 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wfvnl" event={"ID":"233bbc8c-b118-4875-bc52-e626d7410c69","Type":"ContainerDied","Data":"3bdd9a3a444f9cd761faf0d1c2c1f65e1b2916fe07a4b419de2c9003f7a7da92"} Dec 02 16:48:40 crc kubenswrapper[4747]: I1202 16:48:40.920715 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wfvnl" event={"ID":"233bbc8c-b118-4875-bc52-e626d7410c69","Type":"ContainerStarted","Data":"c41f0e142003281d16d19e8b69ab26144bf33bd1c979942b614959fdb7001b37"} Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.103078 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2ptxc"] Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.105410 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.108297 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.109893 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ptxc"] Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.184190 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgmb9\" (UniqueName: \"kubernetes.io/projected/7bb1d09f-243e-488b-af49-e7836ff452c8-kube-api-access-mgmb9\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.184344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bb1d09f-243e-488b-af49-e7836ff452c8-utilities\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.184381 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bb1d09f-243e-488b-af49-e7836ff452c8-catalog-content\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.285680 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bb1d09f-243e-488b-af49-e7836ff452c8-utilities\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.285753 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bb1d09f-243e-488b-af49-e7836ff452c8-catalog-content\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " 
pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.285795 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgmb9\" (UniqueName: \"kubernetes.io/projected/7bb1d09f-243e-488b-af49-e7836ff452c8-kube-api-access-mgmb9\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.287029 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bb1d09f-243e-488b-af49-e7836ff452c8-catalog-content\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.287082 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bb1d09f-243e-488b-af49-e7836ff452c8-utilities\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.307272 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgmb9\" (UniqueName: \"kubernetes.io/projected/7bb1d09f-243e-488b-af49-e7836ff452c8-kube-api-access-mgmb9\") pod \"redhat-marketplace-2ptxc\" (UID: \"7bb1d09f-243e-488b-af49-e7836ff452c8\") " pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.422786 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2ptxc" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.703601 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p55sf"] Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.705849 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.709830 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.716983 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p55sf"] Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.797956 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/922bf4c0-8c15-43f0-a6f7-09601df7efe0-catalog-content\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.798028 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79zd7\" (UniqueName: \"kubernetes.io/projected/922bf4c0-8c15-43f0-a6f7-09601df7efe0-kube-api-access-79zd7\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.798135 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/922bf4c0-8c15-43f0-a6f7-09601df7efe0-utilities\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.831047 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2ptxc"] Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.900611 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/922bf4c0-8c15-43f0-a6f7-09601df7efe0-catalog-content\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.900695 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79zd7\" (UniqueName: \"kubernetes.io/projected/922bf4c0-8c15-43f0-a6f7-09601df7efe0-kube-api-access-79zd7\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.900803 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/922bf4c0-8c15-43f0-a6f7-09601df7efe0-utilities\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.901245 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/922bf4c0-8c15-43f0-a6f7-09601df7efe0-catalog-content\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.902213 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/922bf4c0-8c15-43f0-a6f7-09601df7efe0-utilities\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.922655 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79zd7\" (UniqueName: \"kubernetes.io/projected/922bf4c0-8c15-43f0-a6f7-09601df7efe0-kube-api-access-79zd7\") pod \"redhat-operators-p55sf\" (UID: \"922bf4c0-8c15-43f0-a6f7-09601df7efe0\") " pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:41 crc kubenswrapper[4747]: I1202 16:48:41.944837 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ptxc" event={"ID":"7bb1d09f-243e-488b-af49-e7836ff452c8","Type":"ContainerStarted","Data":"2e3708b42348ea2025f9d320a8ec496cc355d8e82f3f438fc958d225d2ead424"} Dec 02 16:48:42 crc kubenswrapper[4747]: I1202 16:48:42.038630 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:48:42 crc kubenswrapper[4747]: I1202 16:48:42.442869 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p55sf"] Dec 02 16:48:42 crc kubenswrapper[4747]: W1202 16:48:42.449256 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod922bf4c0_8c15_43f0_a6f7_09601df7efe0.slice/crio-96989e7772cdf31df5df93aa1191567a0a29bf12786a9b46e486f4211aeed756 WatchSource:0}: Error finding container 96989e7772cdf31df5df93aa1191567a0a29bf12786a9b46e486f4211aeed756: Status 404 returned error can't find the container with id 96989e7772cdf31df5df93aa1191567a0a29bf12786a9b46e486f4211aeed756 Dec 02 16:48:42 crc kubenswrapper[4747]: I1202 16:48:42.952707 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p55sf" event={"ID":"922bf4c0-8c15-43f0-a6f7-09601df7efe0","Type":"ContainerStarted","Data":"96989e7772cdf31df5df93aa1191567a0a29bf12786a9b46e486f4211aeed756"} Dec 02 16:48:42 crc kubenswrapper[4747]: I1202 16:48:42.955656 4747 generic.go:334] "Generic (PLEG): container finished" podID="7bb1d09f-243e-488b-af49-e7836ff452c8" containerID="e24bf5d563c176fe850c9223227309423985f7d3d63b1bc3e87e04e0e87dc1ea" exitCode=0 Dec 02 16:48:42 crc kubenswrapper[4747]: I1202 16:48:42.955709 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ptxc" event={"ID":"7bb1d09f-243e-488b-af49-e7836ff452c8","Type":"ContainerDied","Data":"e24bf5d563c176fe850c9223227309423985f7d3d63b1bc3e87e04e0e87dc1ea"} Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.048011 4747 generic.go:334] "Generic (PLEG): container finished" podID="922bf4c0-8c15-43f0-a6f7-09601df7efe0" containerID="f16fab402d62edf44f09d4a01600be76333bac25c7604ae05c5cb4d328787498" exitCode=0 Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.048102 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p55sf" event={"ID":"922bf4c0-8c15-43f0-a6f7-09601df7efe0","Type":"ContainerDied","Data":"f16fab402d62edf44f09d4a01600be76333bac25c7604ae05c5cb4d328787498"} Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.052463 4747 generic.go:334] "Generic (PLEG): container finished" podID="7bb1d09f-243e-488b-af49-e7836ff452c8" containerID="f4ca62fe0e62c3cae2ef961ffc9f4bbb361ae87681624be199ac18e220f7f14f" 
Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.052531 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ptxc" event={"ID":"7bb1d09f-243e-488b-af49-e7836ff452c8","Type":"ContainerDied","Data":"f4ca62fe0e62c3cae2ef961ffc9f4bbb361ae87681624be199ac18e220f7f14f"}
Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.057820 4747 generic.go:334] "Generic (PLEG): container finished" podID="0ba8abe4-42dc-4c6d-93fa-4ed196ff9105" containerID="410aab6edc5d600292326956dda6f5ad2fbf022fde9c356808b92a7b11893bad" exitCode=0
Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.057932 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlkb2" event={"ID":"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105","Type":"ContainerDied","Data":"410aab6edc5d600292326956dda6f5ad2fbf022fde9c356808b92a7b11893bad"}
Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.060657 4747 generic.go:334] "Generic (PLEG): container finished" podID="233bbc8c-b118-4875-bc52-e626d7410c69" containerID="04fd35e47c95c32a9dbed1724135e3d8a4d2c37db4cb69008b1e7b516d8d2670" exitCode=0
Dec 02 16:48:46 crc kubenswrapper[4747]: I1202 16:48:46.060697 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wfvnl" event={"ID":"233bbc8c-b118-4875-bc52-e626d7410c69","Type":"ContainerDied","Data":"04fd35e47c95c32a9dbed1724135e3d8a4d2c37db4cb69008b1e7b516d8d2670"}
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.084425 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wfvnl" event={"ID":"233bbc8c-b118-4875-bc52-e626d7410c69","Type":"ContainerStarted","Data":"b0ad16be66baaa762a09aba2011c1787881857e61494cb7c0e1f1b1834472fd4"}
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.089150 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p55sf" event={"ID":"922bf4c0-8c15-43f0-a6f7-09601df7efe0","Type":"ContainerStarted","Data":"2af5da2ba1fb202d8c07ba72852ed1374403a92bcdb845986ffa84b100f99bcd"}
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.091688 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2ptxc" event={"ID":"7bb1d09f-243e-488b-af49-e7836ff452c8","Type":"ContainerStarted","Data":"61ecd4f6a6aa33d7f095bdf5091f977be9eccb0d139b597061cf15b2db747081"}
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.094827 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xlkb2" event={"ID":"0ba8abe4-42dc-4c6d-93fa-4ed196ff9105","Type":"ContainerStarted","Data":"15e3a13b8ed4ec715bfd881a326c21a0a717eca2b23c1920b67dc3c04fd21a45"}
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.108724 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wfvnl" podStartSLOduration=2.50714571 podStartE2EDuration="8.108696963s" podCreationTimestamp="2025-12-02 16:48:39 +0000 UTC" firstStartedPulling="2025-12-02 16:48:40.922128968 +0000 UTC m=+351.449017717" lastFinishedPulling="2025-12-02 16:48:46.523680221 +0000 UTC m=+357.050568970" observedRunningTime="2025-12-02 16:48:47.105459555 +0000 UTC m=+357.632348304" watchObservedRunningTime="2025-12-02 16:48:47.108696963 +0000 UTC m=+357.635585712"
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.129820 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xlkb2" podStartSLOduration=3.540684064 podStartE2EDuration="9.129789281s" podCreationTimestamp="2025-12-02 16:48:38 +0000 UTC" firstStartedPulling="2025-12-02 16:48:40.917775086 +0000 UTC m=+351.444663835" lastFinishedPulling="2025-12-02 16:48:46.506880313 +0000 UTC m=+357.033769052" observedRunningTime="2025-12-02 16:48:47.124808471 +0000 UTC m=+357.651697220" watchObservedRunningTime="2025-12-02 16:48:47.129789281 +0000 UTC m=+357.656678030"
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.181099 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2ptxc" podStartSLOduration=3.078167782 podStartE2EDuration="6.181074091s" podCreationTimestamp="2025-12-02 16:48:41 +0000 UTC" firstStartedPulling="2025-12-02 16:48:43.389876148 +0000 UTC m=+353.916764897" lastFinishedPulling="2025-12-02 16:48:46.492782457 +0000 UTC m=+357.019671206" observedRunningTime="2025-12-02 16:48:47.156255602 +0000 UTC m=+357.683144341" watchObservedRunningTime="2025-12-02 16:48:47.181074091 +0000 UTC m=+357.707962850"
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.309885 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-qk2zr"
Dec 02 16:48:47 crc kubenswrapper[4747]: I1202 16:48:47.376930 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9jst4"]
Dec 02 16:48:48 crc kubenswrapper[4747]: I1202 16:48:48.103030 4747 generic.go:334] "Generic (PLEG): container finished" podID="922bf4c0-8c15-43f0-a6f7-09601df7efe0" containerID="2af5da2ba1fb202d8c07ba72852ed1374403a92bcdb845986ffa84b100f99bcd" exitCode=0
Dec 02 16:48:48 crc kubenswrapper[4747]: I1202 16:48:48.103125 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p55sf" event={"ID":"922bf4c0-8c15-43f0-a6f7-09601df7efe0","Type":"ContainerDied","Data":"2af5da2ba1fb202d8c07ba72852ed1374403a92bcdb845986ffa84b100f99bcd"}
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.032502 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xlkb2"
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.032632 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xlkb2"
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.085604 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xlkb2"
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.115816 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p55sf" event={"ID":"922bf4c0-8c15-43f0-a6f7-09601df7efe0","Type":"ContainerStarted","Data":"2422471ad88fff4eb32be23453768eebfd9c409826d3e4750ad285e9d79cf01d"}
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.139562 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p55sf" podStartSLOduration=5.440509576 podStartE2EDuration="8.13954466s" podCreationTimestamp="2025-12-02 16:48:41 +0000 UTC" firstStartedPulling="2025-12-02 16:48:46.050511231 +0000 UTC m=+356.577399980" lastFinishedPulling="2025-12-02 16:48:48.749546305 +0000 UTC m=+359.276435064" observedRunningTime="2025-12-02 16:48:49.139297032 +0000 UTC m=+359.666185781" watchObservedRunningTime="2025-12-02 16:48:49.13954466 +0000 UTC m=+359.666433409"
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.715926 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wfvnl"
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.716743 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wfvnl"
Dec 02 16:48:49 crc kubenswrapper[4747]: I1202 16:48:49.768506 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wfvnl"
Dec 02 16:48:51 crc kubenswrapper[4747]: I1202 16:48:51.139139 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" podUID="0e56253a-5b24-4cac-8f3f-9b357bc12f82" containerName="oauth-openshift" containerID="cri-o://7c1b109f9951a30d908b8b38a55cc7416cbc5f10a634339b761f94d197730771" gracePeriod=15
Dec 02 16:48:51 crc kubenswrapper[4747]: I1202 16:48:51.169286 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wfvnl"
Dec 02 16:48:51 crc kubenswrapper[4747]: I1202 16:48:51.424018 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2ptxc"
Dec 02 16:48:51 crc kubenswrapper[4747]: I1202 16:48:51.424109 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2ptxc"
Dec 02 16:48:51 crc kubenswrapper[4747]: I1202 16:48:51.473522 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2ptxc"
Dec 02 16:48:52 crc kubenswrapper[4747]: I1202 16:48:52.039382 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p55sf"
Dec 02 16:48:52 crc kubenswrapper[4747]: I1202 16:48:52.039684 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p55sf"
Dec 02 16:48:52 crc kubenswrapper[4747]: I1202 16:48:52.173474 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2ptxc"
Dec 02 16:48:53 crc kubenswrapper[4747]: I1202 16:48:53.075400 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p55sf" podUID="922bf4c0-8c15-43f0-a6f7-09601df7efe0" containerName="registry-server" probeResult="failure" output=<
Dec 02 16:48:53 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s
Dec 02 16:48:53 crc kubenswrapper[4747]: >
Dec 02 16:48:54 crc kubenswrapper[4747]: I1202 16:48:54.149450 4747 generic.go:334] "Generic (PLEG): container finished" podID="0e56253a-5b24-4cac-8f3f-9b357bc12f82" containerID="7c1b109f9951a30d908b8b38a55cc7416cbc5f10a634339b761f94d197730771" exitCode=0
Dec 02 16:48:54 crc kubenswrapper[4747]: I1202 16:48:54.149509 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" event={"ID":"0e56253a-5b24-4cac-8f3f-9b357bc12f82","Type":"ContainerDied","Data":"7c1b109f9951a30d908b8b38a55cc7416cbc5f10a634339b761f94d197730771"}
Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.257360 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp"
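The startup-probe failure above is a gRPC connectivity check against the catalog pod's registry-server on port 50051 with a 1-second budget; it fails while the freshly extracted catalog is still loading, then a later attempt succeeds (status "started" follows). A hedged approximation of such a check using the standard gRPC health protocol; the actual probe mechanism inside the catalog image may differ:

package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	healthpb "google.golang.org/grpc/health/grpc_health_v1"
)

func main() {
	// Mirror the probe's budget: `timeout: failed to connect service ":50051" within 1s`.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	conn, err := grpc.DialContext(ctx, "localhost:50051",
		grpc.WithTransportCredentials(insecure.NewCredentials()), grpc.WithBlock())
	if err != nil {
		fmt.Println("probe failure:", err) // analogous to the kubelet output above
		return
	}
	defer conn.Close()

	resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
	fmt.Println(resp.GetStatus(), err)
}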
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.296684 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-575cc5b957-67pdc"] Dec 02 16:48:55 crc kubenswrapper[4747]: E1202 16:48:55.297006 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e56253a-5b24-4cac-8f3f-9b357bc12f82" containerName="oauth-openshift" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.297020 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e56253a-5b24-4cac-8f3f-9b357bc12f82" containerName="oauth-openshift" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.297153 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e56253a-5b24-4cac-8f3f-9b357bc12f82" containerName="oauth-openshift" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.297690 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.307283 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-575cc5b957-67pdc"] Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.313984 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-login\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314024 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-router-certs\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314041 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7nz7\" (UniqueName: \"kubernetes.io/projected/0e56253a-5b24-4cac-8f3f-9b357bc12f82-kube-api-access-t7nz7\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314064 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-policies\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314085 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-provider-selection\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314103 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-cliconfig\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 
crc kubenswrapper[4747]: I1202 16:48:55.314130 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-idp-0-file-data\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314150 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-trusted-ca-bundle\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314169 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-error\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314192 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-session\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314211 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-service-ca\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314234 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-dir\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314254 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-ocp-branding-template\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314282 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-serving-cert\") pod \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\" (UID: \"0e56253a-5b24-4cac-8f3f-9b357bc12f82\") " Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314378 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 
16:48:55.314399 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314430 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314455 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314476 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314494 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-session\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314514 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/914d3a03-d594-4e74-a1e3-4072396f49bc-audit-dir\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314542 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-audit-policies\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314559 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: 
\"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2trt\" (UniqueName: \"kubernetes.io/projected/914d3a03-d594-4e74-a1e3-4072396f49bc-kube-api-access-q2trt\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314599 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314620 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-login\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314641 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-error\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.314660 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.315706 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.317117 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.317225 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.317461 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.317974 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.324988 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.325037 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e56253a-5b24-4cac-8f3f-9b357bc12f82-kube-api-access-t7nz7" (OuterVolumeSpecName: "kube-api-access-t7nz7") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "kube-api-access-t7nz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.325364 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.325733 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.326199 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.326563 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.328001 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.329210 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.329345 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "0e56253a-5b24-4cac-8f3f-9b357bc12f82" (UID: "0e56253a-5b24-4cac-8f3f-9b357bc12f82"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.416792 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.416948 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417020 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-session\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417059 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/914d3a03-d594-4e74-a1e3-4072396f49bc-audit-dir\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417099 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-audit-policies\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417125 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2trt\" (UniqueName: \"kubernetes.io/projected/914d3a03-d594-4e74-a1e3-4072396f49bc-kube-api-access-q2trt\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417263 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 
crc kubenswrapper[4747]: I1202 16:48:55.417350 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-login\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417383 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-error\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417416 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417466 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417506 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.417714 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/914d3a03-d594-4e74-a1e3-4072396f49bc-audit-dir\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.419476 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.419602 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-audit-policies\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.419715 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.419824 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7nz7\" (UniqueName: \"kubernetes.io/projected/0e56253a-5b24-4cac-8f3f-9b357bc12f82-kube-api-access-t7nz7\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.419885 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.419950 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.419978 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420030 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420025 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420035 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420049 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420151 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420174 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420193 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420209 4747 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e56253a-5b24-4cac-8f3f-9b357bc12f82-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420230 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420249 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.420266 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0e56253a-5b24-4cac-8f3f-9b357bc12f82-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.423108 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.423344 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.423641 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.423648 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: 
\"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.424762 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-error\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.424804 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.425117 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-system-session\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.432713 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/914d3a03-d594-4e74-a1e3-4072396f49bc-v4-0-config-user-template-login\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.435811 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2trt\" (UniqueName: \"kubernetes.io/projected/914d3a03-d594-4e74-a1e3-4072396f49bc-kube-api-access-q2trt\") pod \"oauth-openshift-575cc5b957-67pdc\" (UID: \"914d3a03-d594-4e74-a1e3-4072396f49bc\") " pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:55 crc kubenswrapper[4747]: I1202 16:48:55.663298 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:56 crc kubenswrapper[4747]: I1202 16:48:56.132236 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-575cc5b957-67pdc"] Dec 02 16:48:56 crc kubenswrapper[4747]: I1202 16:48:56.165306 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" event={"ID":"914d3a03-d594-4e74-a1e3-4072396f49bc","Type":"ContainerStarted","Data":"8502ef2e183370c1187b520754e578805b86377792e688fef6e3acc6dc4a5648"} Dec 02 16:48:56 crc kubenswrapper[4747]: I1202 16:48:56.167059 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" event={"ID":"0e56253a-5b24-4cac-8f3f-9b357bc12f82","Type":"ContainerDied","Data":"f7b8f4996ca1d7b4c8ba111d0c79030fcedd38ef5d597489a211ef90ea5413e7"} Dec 02 16:48:56 crc kubenswrapper[4747]: I1202 16:48:56.167098 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cd7fp" Dec 02 16:48:56 crc kubenswrapper[4747]: I1202 16:48:56.167135 4747 scope.go:117] "RemoveContainer" containerID="7c1b109f9951a30d908b8b38a55cc7416cbc5f10a634339b761f94d197730771" Dec 02 16:48:56 crc kubenswrapper[4747]: I1202 16:48:56.196956 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cd7fp"] Dec 02 16:48:56 crc kubenswrapper[4747]: I1202 16:48:56.201940 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cd7fp"] Dec 02 16:48:57 crc kubenswrapper[4747]: I1202 16:48:57.768550 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e56253a-5b24-4cac-8f3f-9b357bc12f82" path="/var/lib/kubelet/pods/0e56253a-5b24-4cac-8f3f-9b357bc12f82/volumes" Dec 02 16:48:58 crc kubenswrapper[4747]: I1202 16:48:58.183495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" event={"ID":"914d3a03-d594-4e74-a1e3-4072396f49bc","Type":"ContainerStarted","Data":"24b1b58a31445c734ce53f02ab8bc7f666b40a015853970fd6ca3945eaf6b595"} Dec 02 16:48:58 crc kubenswrapper[4747]: I1202 16:48:58.184163 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:58 crc kubenswrapper[4747]: I1202 16:48:58.190218 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" Dec 02 16:48:58 crc kubenswrapper[4747]: I1202 16:48:58.217822 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-575cc5b957-67pdc" podStartSLOduration=32.217795623 podStartE2EDuration="32.217795623s" podCreationTimestamp="2025-12-02 16:48:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:48:58.207061848 +0000 UTC m=+368.733950607" watchObservedRunningTime="2025-12-02 16:48:58.217795623 +0000 UTC m=+368.744684372" Dec 02 16:48:59 crc kubenswrapper[4747]: I1202 16:48:59.098336 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xlkb2" Dec 02 16:49:01 crc kubenswrapper[4747]: I1202 16:49:01.795808 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:49:01 crc kubenswrapper[4747]: I1202 16:49:01.795952 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:49:02 crc kubenswrapper[4747]: I1202 16:49:02.080373 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:49:02 crc kubenswrapper[4747]: I1202 16:49:02.126475 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p55sf" Dec 02 16:49:12 crc 
kubenswrapper[4747]: I1202 16:49:12.426378 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" podUID="4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" containerName="registry" containerID="cri-o://47029f4074c32a9a4ea38e0b5552badfe0c91d33d098d142a4cc0de53fae332c" gracePeriod=30 Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.280608 4747 generic.go:334] "Generic (PLEG): container finished" podID="4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" containerID="47029f4074c32a9a4ea38e0b5552badfe0c91d33d098d142a4cc0de53fae332c" exitCode=0 Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.280738 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" event={"ID":"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3","Type":"ContainerDied","Data":"47029f4074c32a9a4ea38e0b5552badfe0c91d33d098d142a4cc0de53fae332c"} Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.358560 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.419324 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-tls\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.419435 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-certificates\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.419533 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-trusted-ca\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.420396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.422521 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgnc5\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-kube-api-access-jgnc5\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.420895 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.421124 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.422580 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-bound-sa-token\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.422606 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-installation-pull-secrets\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.422732 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-ca-trust-extracted\") pod \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\" (UID: \"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3\") " Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.423220 4747 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.423240 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.428784 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.431082 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.431247 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.431771 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-kube-api-access-jgnc5" (OuterVolumeSpecName: "kube-api-access-jgnc5") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "kube-api-access-jgnc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.437966 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.442146 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" (UID: "4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.524563 4747 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.525060 4747 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.525070 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgnc5\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-kube-api-access-jgnc5\") on node \"crc\" DevicePath \"\"" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.525082 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 16:49:13 crc kubenswrapper[4747]: I1202 16:49:13.525091 4747 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 16:49:14 crc kubenswrapper[4747]: I1202 16:49:14.288727 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" event={"ID":"4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3","Type":"ContainerDied","Data":"092de9915beaa93e02f207a5b5a60955446858e0da498f0ebf659b798134ecb2"} Dec 02 16:49:14 crc kubenswrapper[4747]: I1202 16:49:14.288767 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-9jst4" Dec 02 16:49:14 crc kubenswrapper[4747]: I1202 16:49:14.288796 4747 scope.go:117] "RemoveContainer" containerID="47029f4074c32a9a4ea38e0b5552badfe0c91d33d098d142a4cc0de53fae332c" Dec 02 16:49:14 crc kubenswrapper[4747]: I1202 16:49:14.315810 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9jst4"] Dec 02 16:49:14 crc kubenswrapper[4747]: I1202 16:49:14.322190 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-9jst4"] Dec 02 16:49:15 crc kubenswrapper[4747]: I1202 16:49:15.768463 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" path="/var/lib/kubelet/pods/4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3/volumes" Dec 02 16:49:31 crc kubenswrapper[4747]: I1202 16:49:31.795337 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:49:31 crc kubenswrapper[4747]: I1202 16:49:31.796988 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:50:01 crc kubenswrapper[4747]: I1202 16:50:01.795065 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:50:01 crc kubenswrapper[4747]: I1202 16:50:01.797706 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:50:01 crc kubenswrapper[4747]: I1202 16:50:01.797977 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:50:01 crc kubenswrapper[4747]: I1202 16:50:01.799201 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"91e508de3c4af0f147fbc7e05387ad2564a081878e3dc816210246c7720f252a"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 16:50:01 crc kubenswrapper[4747]: I1202 16:50:01.799506 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://91e508de3c4af0f147fbc7e05387ad2564a081878e3dc816210246c7720f252a" gracePeriod=600 Dec 02 16:50:02 crc kubenswrapper[4747]: I1202 16:50:02.615140 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="91e508de3c4af0f147fbc7e05387ad2564a081878e3dc816210246c7720f252a" exitCode=0 Dec 02 16:50:02 crc kubenswrapper[4747]: I1202 16:50:02.615941 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"91e508de3c4af0f147fbc7e05387ad2564a081878e3dc816210246c7720f252a"} Dec 02 16:50:02 crc kubenswrapper[4747]: I1202 16:50:02.615982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"69159044e2a0a0955de6d7cd4a22699a8999351fe5ec6a3aa52db65ea45c9085"} Dec 02 16:50:02 crc kubenswrapper[4747]: I1202 16:50:02.616006 4747 scope.go:117] "RemoveContainer" containerID="e670a389687eac873ddd31be1f6a03258c16ecde51471d28f55c3b413f0c73aa" Dec 02 16:50:49 crc kubenswrapper[4747]: I1202 16:50:49.938148 4747 scope.go:117] "RemoveContainer" containerID="59fcd9d532bdebdfba5c40945814218e3549ebaead87e49be862a7f95f2abc4a" Dec 02 16:50:49 crc kubenswrapper[4747]: I1202 16:50:49.961331 4747 scope.go:117] "RemoveContainer" containerID="3099eb42bb2098850abcd3885f7ebebee14f5f17c91dc6a146cec7fb34810032" Dec 02 16:52:31 crc kubenswrapper[4747]: I1202 16:52:31.795587 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:52:31 crc kubenswrapper[4747]: I1202 16:52:31.796368 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:53:01 crc kubenswrapper[4747]: I1202 16:53:01.795324 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:53:01 crc kubenswrapper[4747]: I1202 16:53:01.795964 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:53:31 crc kubenswrapper[4747]: I1202 16:53:31.795168 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:53:31 crc kubenswrapper[4747]: I1202 16:53:31.796060 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Dec 02 16:53:31 crc kubenswrapper[4747]: I1202 16:53:31.796125 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:53:31 crc kubenswrapper[4747]: I1202 16:53:31.797008 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"69159044e2a0a0955de6d7cd4a22699a8999351fe5ec6a3aa52db65ea45c9085"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 16:53:31 crc kubenswrapper[4747]: I1202 16:53:31.797068 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://69159044e2a0a0955de6d7cd4a22699a8999351fe5ec6a3aa52db65ea45c9085" gracePeriod=600 Dec 02 16:53:32 crc kubenswrapper[4747]: I1202 16:53:32.969358 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="69159044e2a0a0955de6d7cd4a22699a8999351fe5ec6a3aa52db65ea45c9085" exitCode=0 Dec 02 16:53:32 crc kubenswrapper[4747]: I1202 16:53:32.969407 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"69159044e2a0a0955de6d7cd4a22699a8999351fe5ec6a3aa52db65ea45c9085"} Dec 02 16:53:32 crc kubenswrapper[4747]: I1202 16:53:32.970339 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"5bac94c471a07d1efa3cacb30b33cf6d0f493cee57eaf241d36618d258608c30"} Dec 02 16:53:32 crc kubenswrapper[4747]: I1202 16:53:32.970369 4747 scope.go:117] "RemoveContainer" containerID="91e508de3c4af0f147fbc7e05387ad2564a081878e3dc816210246c7720f252a" Dec 02 16:54:34 crc kubenswrapper[4747]: I1202 16:54:34.137855 4747 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.350704 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vr84r"] Dec 02 16:55:06 crc kubenswrapper[4747]: E1202 16:55:06.351833 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" containerName="registry" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.351849 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" containerName="registry" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.351983 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4328bcc2-1f5b-47c1-8dfa-708f4c15a1d3" containerName="registry" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.352470 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.354694 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.355867 4747 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-5cf5m" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.356959 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.361701 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vr84r"] Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.371872 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-b9r5p"] Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.372806 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-b9r5p" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.377529 4747 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-4sbtm" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.390040 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qb975"] Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.391054 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.396740 4747 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lwp9g" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.402490 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-b9r5p"] Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.423037 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qb975"] Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.463920 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmdsv\" (UniqueName: \"kubernetes.io/projected/2c58e5fb-8d6a-4102-9191-ca69fb40e3bd-kube-api-access-bmdsv\") pod \"cert-manager-cainjector-7f985d654d-vr84r\" (UID: \"2c58e5fb-8d6a-4102-9191-ca69fb40e3bd\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.564655 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpgft\" (UniqueName: \"kubernetes.io/projected/1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9-kube-api-access-bpgft\") pod \"cert-manager-5b446d88c5-b9r5p\" (UID: \"1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9\") " pod="cert-manager/cert-manager-5b446d88c5-b9r5p" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.564740 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nf9b\" (UniqueName: \"kubernetes.io/projected/9ee35e2a-2967-4c76-92ec-2a7b5db87ba7-kube-api-access-6nf9b\") pod \"cert-manager-webhook-5655c58dd6-qb975\" (UID: \"9ee35e2a-2967-4c76-92ec-2a7b5db87ba7\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" Dec 02 16:55:06 
crc kubenswrapper[4747]: I1202 16:55:06.564800 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmdsv\" (UniqueName: \"kubernetes.io/projected/2c58e5fb-8d6a-4102-9191-ca69fb40e3bd-kube-api-access-bmdsv\") pod \"cert-manager-cainjector-7f985d654d-vr84r\" (UID: \"2c58e5fb-8d6a-4102-9191-ca69fb40e3bd\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.586878 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmdsv\" (UniqueName: \"kubernetes.io/projected/2c58e5fb-8d6a-4102-9191-ca69fb40e3bd-kube-api-access-bmdsv\") pod \"cert-manager-cainjector-7f985d654d-vr84r\" (UID: \"2c58e5fb-8d6a-4102-9191-ca69fb40e3bd\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.666334 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpgft\" (UniqueName: \"kubernetes.io/projected/1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9-kube-api-access-bpgft\") pod \"cert-manager-5b446d88c5-b9r5p\" (UID: \"1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9\") " pod="cert-manager/cert-manager-5b446d88c5-b9r5p" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.666451 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nf9b\" (UniqueName: \"kubernetes.io/projected/9ee35e2a-2967-4c76-92ec-2a7b5db87ba7-kube-api-access-6nf9b\") pod \"cert-manager-webhook-5655c58dd6-qb975\" (UID: \"9ee35e2a-2967-4c76-92ec-2a7b5db87ba7\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.676202 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.706626 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nf9b\" (UniqueName: \"kubernetes.io/projected/9ee35e2a-2967-4c76-92ec-2a7b5db87ba7-kube-api-access-6nf9b\") pod \"cert-manager-webhook-5655c58dd6-qb975\" (UID: \"9ee35e2a-2967-4c76-92ec-2a7b5db87ba7\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.706626 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpgft\" (UniqueName: \"kubernetes.io/projected/1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9-kube-api-access-bpgft\") pod \"cert-manager-5b446d88c5-b9r5p\" (UID: \"1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9\") " pod="cert-manager/cert-manager-5b446d88c5-b9r5p" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.719698 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.925213 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-vr84r"] Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.932816 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.961072 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qb975"] Dec 02 16:55:06 crc kubenswrapper[4747]: W1202 16:55:06.965066 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ee35e2a_2967_4c76_92ec_2a7b5db87ba7.slice/crio-4848f763a0fd265bfc3392ece74ca712a022a89638ae42179ac231d2f85662aa WatchSource:0}: Error finding container 4848f763a0fd265bfc3392ece74ca712a022a89638ae42179ac231d2f85662aa: Status 404 returned error can't find the container with id 4848f763a0fd265bfc3392ece74ca712a022a89638ae42179ac231d2f85662aa Dec 02 16:55:06 crc kubenswrapper[4747]: I1202 16:55:06.992183 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-b9r5p" Dec 02 16:55:07 crc kubenswrapper[4747]: I1202 16:55:07.202096 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-b9r5p"] Dec 02 16:55:07 crc kubenswrapper[4747]: I1202 16:55:07.596834 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" event={"ID":"9ee35e2a-2967-4c76-92ec-2a7b5db87ba7","Type":"ContainerStarted","Data":"4848f763a0fd265bfc3392ece74ca712a022a89638ae42179ac231d2f85662aa"} Dec 02 16:55:07 crc kubenswrapper[4747]: I1202 16:55:07.598037 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-b9r5p" event={"ID":"1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9","Type":"ContainerStarted","Data":"2a69c7c90dd8d0161e0304d0c5168433fd441ce8c842e821b31e35ce21e42be2"} Dec 02 16:55:07 crc kubenswrapper[4747]: I1202 16:55:07.598873 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" event={"ID":"2c58e5fb-8d6a-4102-9191-ca69fb40e3bd","Type":"ContainerStarted","Data":"dd22b4e4143761c77c351ae4c832a5843317b68a20c00f19343630f2b1d2f1b3"} Dec 02 16:55:10 crc kubenswrapper[4747]: I1202 16:55:10.622924 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" event={"ID":"2c58e5fb-8d6a-4102-9191-ca69fb40e3bd","Type":"ContainerStarted","Data":"334f08fb9f10431bfb2145e47f76b7190fd77373852bffd35f7e092c6415d896"} Dec 02 16:55:10 crc kubenswrapper[4747]: I1202 16:55:10.625692 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-b9r5p" event={"ID":"1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9","Type":"ContainerStarted","Data":"6aad0081674fbc0cfddc50a0f6abd22fba84acbd4ea7c027c65a5c938650314f"} Dec 02 16:55:10 crc kubenswrapper[4747]: I1202 16:55:10.629305 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" event={"ID":"9ee35e2a-2967-4c76-92ec-2a7b5db87ba7","Type":"ContainerStarted","Data":"4c641c8b29911e852f83c5c88526ea8f878c809e7948e6758caad0402067a0f0"} Dec 02 16:55:10 crc kubenswrapper[4747]: I1202 16:55:10.629511 4747 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" Dec 02 16:55:10 crc kubenswrapper[4747]: I1202 16:55:10.663868 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" podStartSLOduration=1.456200255 podStartE2EDuration="4.66384098s" podCreationTimestamp="2025-12-02 16:55:06 +0000 UTC" firstStartedPulling="2025-12-02 16:55:06.967239259 +0000 UTC m=+737.494128008" lastFinishedPulling="2025-12-02 16:55:10.174879994 +0000 UTC m=+740.701768733" observedRunningTime="2025-12-02 16:55:10.658586114 +0000 UTC m=+741.185474863" watchObservedRunningTime="2025-12-02 16:55:10.66384098 +0000 UTC m=+741.190729729" Dec 02 16:55:10 crc kubenswrapper[4747]: I1202 16:55:10.666812 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-vr84r" podStartSLOduration=1.301934769 podStartE2EDuration="4.666791312s" podCreationTimestamp="2025-12-02 16:55:06 +0000 UTC" firstStartedPulling="2025-12-02 16:55:06.932528342 +0000 UTC m=+737.459417091" lastFinishedPulling="2025-12-02 16:55:10.297384885 +0000 UTC m=+740.824273634" observedRunningTime="2025-12-02 16:55:10.643659078 +0000 UTC m=+741.170547827" watchObservedRunningTime="2025-12-02 16:55:10.666791312 +0000 UTC m=+741.193680061" Dec 02 16:55:10 crc kubenswrapper[4747]: I1202 16:55:10.685897 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-b9r5p" podStartSLOduration=1.538441065 podStartE2EDuration="4.685869684s" podCreationTimestamp="2025-12-02 16:55:06 +0000 UTC" firstStartedPulling="2025-12-02 16:55:07.213551447 +0000 UTC m=+737.740440186" lastFinishedPulling="2025-12-02 16:55:10.360980046 +0000 UTC m=+740.887868805" observedRunningTime="2025-12-02 16:55:10.68143657 +0000 UTC m=+741.208325319" watchObservedRunningTime="2025-12-02 16:55:10.685869684 +0000 UTC m=+741.212758433" Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.723817 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-qb975" Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.763074 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zmcxm"] Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.763769 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-controller" containerID="cri-o://3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1" gracePeriod=30 Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.763896 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="nbdb" containerID="cri-o://6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" gracePeriod=30 Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.763989 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="northd" containerID="cri-o://da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" gracePeriod=30 Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.764062 4747 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" gracePeriod=30 Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.764119 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="sbdb" containerID="cri-o://511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" gracePeriod=30 Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.764128 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-node" containerID="cri-o://15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" gracePeriod=30 Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.764181 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-acl-logging" containerID="cri-o://e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091" gracePeriod=30 Dec 02 16:55:16 crc kubenswrapper[4747]: I1202 16:55:16.807630 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" containerID="cri-o://3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" gracePeriod=30 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.127057 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/2.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.131472 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovn-acl-logging/0.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.132248 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovn-controller/0.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.132888 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198460 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l455b"] Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198784 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198804 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198817 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="sbdb" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198825 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="sbdb" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198836 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198845 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198853 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="northd" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198859 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="northd" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198868 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kubecfg-setup" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198875 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kubecfg-setup" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198886 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198892 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198901 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198928 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198939 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-acl-logging" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198945 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-acl-logging" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198953 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-node" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198961 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-node" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198968 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198975 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.198985 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="nbdb" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.198999 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="nbdb" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.199014 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199021 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199129 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-node" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199144 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="northd" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199150 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="nbdb" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199162 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199168 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-acl-logging" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199177 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199186 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199194 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovn-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199202 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="sbdb" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199209 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.199468 4747 
memory_manager.go:354] "RemoveStaleState removing state" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerName="ovnkube-controller" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.203763 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263248 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-env-overrides\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263327 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-slash\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263350 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-var-lib-cni-networks-ovn-kubernetes\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263378 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd24c\" (UniqueName: \"kubernetes.io/projected/b62a2b51-3b8a-4786-97ee-01d2c6332c83-kube-api-access-xd24c\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263579 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-node-log\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263636 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-node-log" (OuterVolumeSpecName: "node-log") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263739 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-systemd-units\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.263765 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264051 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-slash" (OuterVolumeSpecName: "host-slash") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264143 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264087 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-netns\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264114 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264359 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-log-socket\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264470 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-netd\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264472 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-log-socket" (OuterVolumeSpecName: "log-socket") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264554 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264590 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovn-node-metrics-cert\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264707 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-var-lib-openvswitch\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264742 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-bin\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264766 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-kubelet\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264806 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-ovn\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264832 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264843 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-openvswitch\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264881 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264925 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-ovn-kubernetes\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264942 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264957 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264993 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264971 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.264972 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-systemd\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265036 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265118 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-script-lib\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265170 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-etc-openvswitch\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265231 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-config\") pod \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\" (UID: \"b62a2b51-3b8a-4786-97ee-01d2c6332c83\") " Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265337 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265613 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-ovnkube-config\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265724 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-kubelet\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265825 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-cni-bin\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265872 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.265969 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-etc-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266031 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/370cabec-9cac-4f3f-96d8-eb681190c430-ovn-node-metrics-cert\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266070 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-systemd-units\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266128 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-run-ovn-kubernetes\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266166 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266247 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-slash\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266283 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-ovn\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266311 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-systemd\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266336 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-node-log\") pod 
\"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266410 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-run-netns\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266477 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-ovnkube-script-lib\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266567 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-log-socket\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266598 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266654 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-env-overrides\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266682 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvx2j\" (UniqueName: \"kubernetes.io/projected/370cabec-9cac-4f3f-96d8-eb681190c430-kube-api-access-kvx2j\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266701 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-var-lib-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266729 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-cni-netd\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.266863 4747 reconciler_common.go:293] "Volume detached for volume 
\"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267307 4747 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267399 4747 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267424 4747 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267441 4747 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267482 4747 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267503 4747 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267515 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267530 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267544 4747 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-slash\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267557 4747 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267665 4747 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-node-log\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267686 4747 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267757 4747 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267782 4747 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-log-socket\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267794 4747 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.267863 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.271573 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b62a2b51-3b8a-4786-97ee-01d2c6332c83-kube-api-access-xd24c" (OuterVolumeSpecName: "kube-api-access-xd24c") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "kube-api-access-xd24c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.272619 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.280062 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "b62a2b51-3b8a-4786-97ee-01d2c6332c83" (UID: "b62a2b51-3b8a-4786-97ee-01d2c6332c83"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369411 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-slash\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369483 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-ovn\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369516 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-node-log\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369540 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-systemd\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369568 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-run-netns\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369598 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-ovnkube-script-lib\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-log-socket\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369641 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-ovn\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369695 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-run-netns\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369741 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-log-socket\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369711 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369620 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-slash\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369831 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-systemd\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369655 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.369861 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-node-log\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370035 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-env-overrides\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370077 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvx2j\" (UniqueName: \"kubernetes.io/projected/370cabec-9cac-4f3f-96d8-eb681190c430-kube-api-access-kvx2j\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370136 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-var-lib-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370199 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-cni-netd\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370268 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-var-lib-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370299 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-ovnkube-config\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370340 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-cni-netd\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370405 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-kubelet\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370527 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-cni-bin\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370540 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-kubelet\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370576 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-etc-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370615 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-etc-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/370cabec-9cac-4f3f-96d8-eb681190c430-ovn-node-metrics-cert\") 
pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370690 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-systemd-units\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370730 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-run-ovn-kubernetes\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370764 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370774 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-run-ovn-kubernetes\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370787 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-systemd-units\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-host-cni-bin\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370815 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-env-overrides\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370848 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/370cabec-9cac-4f3f-96d8-eb681190c430-run-openvswitch\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370841 4747 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b62a2b51-3b8a-4786-97ee-01d2c6332c83-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370893 4747 reconciler_common.go:293] "Volume 
detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370964 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd24c\" (UniqueName: \"kubernetes.io/projected/b62a2b51-3b8a-4786-97ee-01d2c6332c83-kube-api-access-xd24c\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.370987 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b62a2b51-3b8a-4786-97ee-01d2c6332c83-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.371279 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-ovnkube-config\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.371442 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/370cabec-9cac-4f3f-96d8-eb681190c430-ovnkube-script-lib\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.374819 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/370cabec-9cac-4f3f-96d8-eb681190c430-ovn-node-metrics-cert\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.388877 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvx2j\" (UniqueName: \"kubernetes.io/projected/370cabec-9cac-4f3f-96d8-eb681190c430-kube-api-access-kvx2j\") pod \"ovnkube-node-l455b\" (UID: \"370cabec-9cac-4f3f-96d8-eb681190c430\") " pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.521463 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.680652 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"3951155c2e12488b98faa5ffa58d8fec6cf6ac3a129912ab23ae1f7274f44fb9"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.684048 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsbf6_de9e6dbe-5eb1-40b7-8ddf-a8df9977153a/kube-multus/1.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.685068 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsbf6_de9e6dbe-5eb1-40b7-8ddf-a8df9977153a/kube-multus/0.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.685153 4747 generic.go:334] "Generic (PLEG): container finished" podID="de9e6dbe-5eb1-40b7-8ddf-a8df9977153a" containerID="4427788e64004a199259aa6db948ace49dff52f17eeb698baf3cecfc17154cb5" exitCode=2 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.685226 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsbf6" event={"ID":"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a","Type":"ContainerDied","Data":"4427788e64004a199259aa6db948ace49dff52f17eeb698baf3cecfc17154cb5"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.685314 4747 scope.go:117] "RemoveContainer" containerID="80656a21b6e51e1a53e25ce0283c1c1b033c8cbfcdfb953bcb829f0de8782b17" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.686598 4747 scope.go:117] "RemoveContainer" containerID="4427788e64004a199259aa6db948ace49dff52f17eeb698baf3cecfc17154cb5" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.690484 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovnkube-controller/2.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.699247 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovn-acl-logging/0.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.700132 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zmcxm_b62a2b51-3b8a-4786-97ee-01d2c6332c83/ovn-controller/0.log" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701210 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" exitCode=0 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701285 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" exitCode=0 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701313 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" exitCode=0 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701334 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" exitCode=0 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701353 4747 generic.go:334] "Generic 
(PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" exitCode=0 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701370 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" exitCode=0 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701389 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091" exitCode=143 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701407 4747 generic.go:334] "Generic (PLEG): container finished" podID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" containerID="3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1" exitCode=143 Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701414 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701474 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701680 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701694 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701712 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701728 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701743 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701751 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701760 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701768 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701776 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701783 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701790 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701796 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701803 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701812 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701825 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701834 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701841 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701848 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701855 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701862 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701869 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701876 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701882 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701889 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701897 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701961 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701971 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701977 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701985 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701992 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701998 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702005 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702012 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702018 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702024 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702034 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" event={"ID":"b62a2b51-3b8a-4786-97ee-01d2c6332c83","Type":"ContainerDied","Data":"276e654dfaa1bb645200da477315585256fdd1921fd69652581b6ef4f5976d23"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702044 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702054 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702061 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702067 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702074 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702081 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702089 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702095 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702102 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.702108 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.701376 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zmcxm" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.736410 4747 scope.go:117] "RemoveContainer" containerID="3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.787821 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.797362 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zmcxm"] Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.802196 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zmcxm"] Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.822737 4747 scope.go:117] "RemoveContainer" containerID="511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.839530 4747 scope.go:117] "RemoveContainer" containerID="6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.857959 4747 scope.go:117] "RemoveContainer" containerID="da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.879407 4747 scope.go:117] "RemoveContainer" containerID="a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.917982 4747 scope.go:117] "RemoveContainer" containerID="15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.940370 4747 scope.go:117] "RemoveContainer" containerID="e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.955108 4747 scope.go:117] "RemoveContainer" containerID="3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.972029 4747 scope.go:117] "RemoveContainer" containerID="2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.987518 4747 scope.go:117] "RemoveContainer" containerID="3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.988317 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": container with ID starting with 3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595 not found: ID does not exist" containerID="3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.988389 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} err="failed to get container status \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": rpc error: code = NotFound desc = could not find container \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": container with ID starting with 3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.988436 4747 scope.go:117] "RemoveContainer" 
containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.988840 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": container with ID starting with 8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a not found: ID does not exist" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.988868 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} err="failed to get container status \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": rpc error: code = NotFound desc = could not find container \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": container with ID starting with 8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.988886 4747 scope.go:117] "RemoveContainer" containerID="511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.989384 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": container with ID starting with 511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c not found: ID does not exist" containerID="511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.989422 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} err="failed to get container status \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": rpc error: code = NotFound desc = could not find container \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": container with ID starting with 511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.989453 4747 scope.go:117] "RemoveContainer" containerID="6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.989743 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": container with ID starting with 6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137 not found: ID does not exist" containerID="6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.989799 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} err="failed to get container status \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": rpc error: code = NotFound desc = could not find container \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": container with ID starting with 
6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.989814 4747 scope.go:117] "RemoveContainer" containerID="da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.990144 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": container with ID starting with da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c not found: ID does not exist" containerID="da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.990176 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} err="failed to get container status \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": rpc error: code = NotFound desc = could not find container \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": container with ID starting with da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.990193 4747 scope.go:117] "RemoveContainer" containerID="a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.990474 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": container with ID starting with a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad not found: ID does not exist" containerID="a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.990520 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} err="failed to get container status \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": rpc error: code = NotFound desc = could not find container \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": container with ID starting with a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.990540 4747 scope.go:117] "RemoveContainer" containerID="15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.990840 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": container with ID starting with 15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8 not found: ID does not exist" containerID="15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.990872 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} err="failed to get container status \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": rpc 
error: code = NotFound desc = could not find container \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": container with ID starting with 15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.990890 4747 scope.go:117] "RemoveContainer" containerID="e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.991492 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": container with ID starting with e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091 not found: ID does not exist" containerID="e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.991552 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} err="failed to get container status \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": rpc error: code = NotFound desc = could not find container \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": container with ID starting with e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.991603 4747 scope.go:117] "RemoveContainer" containerID="3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.992039 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": container with ID starting with 3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1 not found: ID does not exist" containerID="3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.992089 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} err="failed to get container status \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": rpc error: code = NotFound desc = could not find container \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": container with ID starting with 3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.992109 4747 scope.go:117] "RemoveContainer" containerID="2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709" Dec 02 16:55:17 crc kubenswrapper[4747]: E1202 16:55:17.992482 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": container with ID starting with 2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709 not found: ID does not exist" containerID="2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.992521 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} err="failed to get container status \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": rpc error: code = NotFound desc = could not find container \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": container with ID starting with 2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.992567 4747 scope.go:117] "RemoveContainer" containerID="3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.992962 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} err="failed to get container status \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": rpc error: code = NotFound desc = could not find container \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": container with ID starting with 3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.992986 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.993276 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} err="failed to get container status \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": rpc error: code = NotFound desc = could not find container \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": container with ID starting with 8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.993296 4747 scope.go:117] "RemoveContainer" containerID="511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.993569 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} err="failed to get container status \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": rpc error: code = NotFound desc = could not find container \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": container with ID starting with 511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.993590 4747 scope.go:117] "RemoveContainer" containerID="6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.993850 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} err="failed to get container status \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": rpc error: code = NotFound desc = could not find container \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": container with ID starting with 6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137 not found: ID does not exist" Dec 
02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.993871 4747 scope.go:117] "RemoveContainer" containerID="da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.994296 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} err="failed to get container status \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": rpc error: code = NotFound desc = could not find container \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": container with ID starting with da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.994330 4747 scope.go:117] "RemoveContainer" containerID="a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.994605 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} err="failed to get container status \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": rpc error: code = NotFound desc = could not find container \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": container with ID starting with a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.994632 4747 scope.go:117] "RemoveContainer" containerID="15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.994890 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} err="failed to get container status \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": rpc error: code = NotFound desc = could not find container \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": container with ID starting with 15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.994992 4747 scope.go:117] "RemoveContainer" containerID="e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.995236 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} err="failed to get container status \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": rpc error: code = NotFound desc = could not find container \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": container with ID starting with e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.995263 4747 scope.go:117] "RemoveContainer" containerID="3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.995587 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} err="failed to get container status 
\"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": rpc error: code = NotFound desc = could not find container \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": container with ID starting with 3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.995611 4747 scope.go:117] "RemoveContainer" containerID="2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.995947 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} err="failed to get container status \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": rpc error: code = NotFound desc = could not find container \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": container with ID starting with 2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.995974 4747 scope.go:117] "RemoveContainer" containerID="3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.996223 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} err="failed to get container status \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": rpc error: code = NotFound desc = could not find container \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": container with ID starting with 3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.996247 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.996657 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} err="failed to get container status \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": rpc error: code = NotFound desc = could not find container \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": container with ID starting with 8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.996697 4747 scope.go:117] "RemoveContainer" containerID="511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.996997 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} err="failed to get container status \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": rpc error: code = NotFound desc = could not find container \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": container with ID starting with 511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.997026 4747 scope.go:117] "RemoveContainer" 
containerID="6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.997408 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} err="failed to get container status \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": rpc error: code = NotFound desc = could not find container \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": container with ID starting with 6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.997454 4747 scope.go:117] "RemoveContainer" containerID="da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.997796 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} err="failed to get container status \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": rpc error: code = NotFound desc = could not find container \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": container with ID starting with da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.997847 4747 scope.go:117] "RemoveContainer" containerID="a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.998411 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} err="failed to get container status \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": rpc error: code = NotFound desc = could not find container \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": container with ID starting with a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.998433 4747 scope.go:117] "RemoveContainer" containerID="15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.998859 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} err="failed to get container status \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": rpc error: code = NotFound desc = could not find container \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": container with ID starting with 15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.999190 4747 scope.go:117] "RemoveContainer" containerID="e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.999620 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} err="failed to get container status \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": rpc error: code = NotFound desc = could not find 
container \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": container with ID starting with e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.999643 4747 scope.go:117] "RemoveContainer" containerID="3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.999937 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} err="failed to get container status \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": rpc error: code = NotFound desc = could not find container \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": container with ID starting with 3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1 not found: ID does not exist" Dec 02 16:55:17 crc kubenswrapper[4747]: I1202 16:55:17.999963 4747 scope.go:117] "RemoveContainer" containerID="2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.000295 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} err="failed to get container status \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": rpc error: code = NotFound desc = could not find container \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": container with ID starting with 2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709 not found: ID does not exist" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.000326 4747 scope.go:117] "RemoveContainer" containerID="3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.000642 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595"} err="failed to get container status \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": rpc error: code = NotFound desc = could not find container \"3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595\": container with ID starting with 3452d3e9e5e3e80e19089247e0fc9cd7d44ba82704b8900c1fb2298f4fb22595 not found: ID does not exist" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.000669 4747 scope.go:117] "RemoveContainer" containerID="8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.000999 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a"} err="failed to get container status \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": rpc error: code = NotFound desc = could not find container \"8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a\": container with ID starting with 8da6090b84f2ed432b82cd4489a517f1ec382bb7d6bea69794fe44fac3c74e6a not found: ID does not exist" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.001078 4747 scope.go:117] "RemoveContainer" containerID="511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.001566 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c"} err="failed to get container status \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": rpc error: code = NotFound desc = could not find container \"511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c\": container with ID starting with 511a994d637c8854c1b8818b692fad7f8b96234573c016a6f78299fb8e056b0c not found: ID does not exist" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.001594 4747 scope.go:117] "RemoveContainer" containerID="6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.001872 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137"} err="failed to get container status \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": rpc error: code = NotFound desc = could not find container \"6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137\": container with ID starting with 6fd10de08c266b1b6bcc702e668ef2c79f457e33b976cfd30bfd68c44a18a137 not found: ID does not exist" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.001899 4747 scope.go:117] "RemoveContainer" containerID="da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.002185 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c"} err="failed to get container status \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": rpc error: code = NotFound desc = could not find container \"da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c\": container with ID starting with da91e3881e6bbf7d556d0391d3b98307242dc3535af6ec10eb9349a0f6f50f1c not found: ID does not exist" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.002213 4747 scope.go:117] "RemoveContainer" containerID="a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.002616 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad"} err="failed to get container status \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": rpc error: code = NotFound desc = could not find container \"a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad\": container with ID starting with a91f5546929eb4083b0072dea8914b0988e39cf90c5d5a7596a5bf2c4d0289ad not found: ID does not exist" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.002641 4747 scope.go:117] "RemoveContainer" containerID="15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8" Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.002958 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} err="failed to get container status \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": rpc error: code = NotFound desc = could not find container \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": container with ID starting with 
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.002958 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8"} err="failed to get container status \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": rpc error: code = NotFound desc = could not find container \"15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8\": container with ID starting with 15135654c41b2f96187a29c3bcf6f027b599db92ea715741e37dfbe8350db4a8 not found: ID does not exist"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.003026 4747 scope.go:117] "RemoveContainer" containerID="e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.003399 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091"} err="failed to get container status \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": rpc error: code = NotFound desc = could not find container \"e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091\": container with ID starting with e2fff85ba8aa32c33a69494ddcc2853eaaeac9cac51768fa7888ac24a8a57091 not found: ID does not exist"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.003427 4747 scope.go:117] "RemoveContainer" containerID="3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.003767 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1"} err="failed to get container status \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": rpc error: code = NotFound desc = could not find container \"3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1\": container with ID starting with 3e26601a90d888397c1d1181fca7ab5630f56afc4c580cacfa190d2169e6b9d1 not found: ID does not exist"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.003797 4747 scope.go:117] "RemoveContainer" containerID="2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.004170 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709"} err="failed to get container status \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": rpc error: code = NotFound desc = could not find container \"2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709\": container with ID starting with 2e0d4ad2a4d0c6810e22a3909e9f50322f21bbc7408cb0312ed6f74c4e468709 not found: ID does not exist"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.734705 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-tsbf6_de9e6dbe-5eb1-40b7-8ddf-a8df9977153a/kube-multus/1.log"
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.734903 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-tsbf6" event={"ID":"de9e6dbe-5eb1-40b7-8ddf-a8df9977153a","Type":"ContainerStarted","Data":"331d1e4898d1454b399d9dd1b12e06e2c064ac86b6ddaded3d6c9b4934d72996"}
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.742372 4747 generic.go:334] "Generic (PLEG): container finished" podID="370cabec-9cac-4f3f-96d8-eb681190c430" containerID="fb622b796f8e405156fd0ac0bfe1d3a12cab41cfdd02659208d5ab85b78edcd7" exitCode=0
Dec 02 16:55:18 crc kubenswrapper[4747]: I1202 16:55:18.742453 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerDied","Data":"fb622b796f8e405156fd0ac0bfe1d3a12cab41cfdd02659208d5ab85b78edcd7"}
Dec 02 16:55:19 crc kubenswrapper[4747]: I1202 16:55:19.754209 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"610ddcd0362627520db803b1907110954f281d7f8e1dccdb22435d76bc8d6558"}
Dec 02 16:55:19 crc kubenswrapper[4747]: I1202 16:55:19.755089 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"a238264e246330844cc64a860b4b5fa3adc10a1ca9280035c3c9a6f43ac3d68c"}
Dec 02 16:55:19 crc kubenswrapper[4747]: I1202 16:55:19.755110 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"55f205f593809583f6daca3ebee40e325625083162d7823639b74dafa87ba57d"}
Dec 02 16:55:19 crc kubenswrapper[4747]: I1202 16:55:19.755125 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"e17965171f22dedee407b5859c18acef437d74384fe6bbb4d1cb1ef0c842c346"}
Dec 02 16:55:19 crc kubenswrapper[4747]: I1202 16:55:19.755138 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"e29569c365337aab20479a245bbc1bc3e82b3ec9ebd92ce61d31dd368b6bac00"}
Dec 02 16:55:19 crc kubenswrapper[4747]: I1202 16:55:19.755151 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"2d44cdb474c6c218763a6dce2957469c0b994f5f8248adfdfc88d395f2f8eb63"}
Dec 02 16:55:19 crc kubenswrapper[4747]: I1202 16:55:19.780993 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b62a2b51-3b8a-4786-97ee-01d2c6332c83" path="/var/lib/kubelet/pods/b62a2b51-3b8a-4786-97ee-01d2c6332c83/volumes"
Dec 02 16:55:22 crc kubenswrapper[4747]: I1202 16:55:22.794339 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"9143177d6afdad6ab97bb1cb2431b95ac54048ad2d381f8c92b42945eebe7c5a"}
Dec 02 16:55:25 crc kubenswrapper[4747]: I1202 16:55:25.818370 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" event={"ID":"370cabec-9cac-4f3f-96d8-eb681190c430","Type":"ContainerStarted","Data":"401317e2d2bd7219b2d6f457d899bca7233b9f5754d2382c0e4eea3a032750ac"}
Dec 02 16:55:25 crc kubenswrapper[4747]: I1202 16:55:25.818818 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l455b"
Dec 02 16:55:25 crc kubenswrapper[4747]: I1202 16:55:25.848491 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-l455b" podStartSLOduration=8.848467867 podStartE2EDuration="8.848467867s" podCreationTimestamp="2025-12-02 16:55:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:55:25.848410705 +0000 UTC m=+756.375299464" watchObservedRunningTime="2025-12-02 16:55:25.848467867 +0000 UTC m=+756.375356616"
Dec 02 16:55:25 crc kubenswrapper[4747]: I1202 16:55:25.849752 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l455b"
Dec 02 16:55:26 crc kubenswrapper[4747]: I1202 16:55:26.825259 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l455b"
Dec 02 16:55:26 crc kubenswrapper[4747]: I1202 16:55:26.825324 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l455b"
Dec 02 16:55:26 crc kubenswrapper[4747]: I1202 16:55:26.853214 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l455b"
Dec 02 16:55:47 crc kubenswrapper[4747]: I1202 16:55:47.558855 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l455b"
Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.740463 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs"]
Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.743893 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs"
Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.750366 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.755017 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs"]
Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.872530 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szzpc\" (UniqueName: \"kubernetes.io/projected/b34ed569-9800-4521-bbd9-7e4249513755-kube-api-access-szzpc\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs"
Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.872603 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs"
Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.872653 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs"
\"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.974498 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.974528 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.975179 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:55:56 crc kubenswrapper[4747]: I1202 16:55:56.975506 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:55:57 crc kubenswrapper[4747]: I1202 16:55:57.012935 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szzpc\" (UniqueName: \"kubernetes.io/projected/b34ed569-9800-4521-bbd9-7e4249513755-kube-api-access-szzpc\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:55:57 crc kubenswrapper[4747]: I1202 16:55:57.060870 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:55:57 crc kubenswrapper[4747]: I1202 16:55:57.288374 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs"] Dec 02 16:55:58 crc kubenswrapper[4747]: I1202 16:55:58.064440 4747 generic.go:334] "Generic (PLEG): container finished" podID="b34ed569-9800-4521-bbd9-7e4249513755" containerID="4d12e13051621a307e4b52903810b693ae37e7852f282999ffe58dd14fad5c50" exitCode=0 Dec 02 16:55:58 crc kubenswrapper[4747]: I1202 16:55:58.064546 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" event={"ID":"b34ed569-9800-4521-bbd9-7e4249513755","Type":"ContainerDied","Data":"4d12e13051621a307e4b52903810b693ae37e7852f282999ffe58dd14fad5c50"} Dec 02 16:55:58 crc kubenswrapper[4747]: I1202 16:55:58.064880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" event={"ID":"b34ed569-9800-4521-bbd9-7e4249513755","Type":"ContainerStarted","Data":"aefd02373e15d8a6ef51a192c54e22ec17a2e91dfbaeda0988ab26630796d762"} Dec 02 16:55:58 crc kubenswrapper[4747]: I1202 16:55:58.919530 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fdxtf"] Dec 02 16:55:58 crc kubenswrapper[4747]: I1202 16:55:58.920876 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:58 crc kubenswrapper[4747]: I1202 16:55:58.943035 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fdxtf"] Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.106781 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4trvc\" (UniqueName: \"kubernetes.io/projected/a0ff08da-7c85-4998-8c5a-12f46a06e892-kube-api-access-4trvc\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.106864 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-utilities\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.106901 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-catalog-content\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.208486 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-utilities\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.208567 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-catalog-content\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.208663 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4trvc\" (UniqueName: \"kubernetes.io/projected/a0ff08da-7c85-4998-8c5a-12f46a06e892-kube-api-access-4trvc\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.209355 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-utilities\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.209383 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-catalog-content\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.232228 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4trvc\" (UniqueName: \"kubernetes.io/projected/a0ff08da-7c85-4998-8c5a-12f46a06e892-kube-api-access-4trvc\") pod \"redhat-operators-fdxtf\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.252436 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:55:59 crc kubenswrapper[4747]: I1202 16:55:59.742667 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fdxtf"] Dec 02 16:55:59 crc kubenswrapper[4747]: W1202 16:55:59.751881 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0ff08da_7c85_4998_8c5a_12f46a06e892.slice/crio-8a20c5a6059553d3b9f31ffe8bca7339f8a190fe478fa94f6c5308b87b876cc9 WatchSource:0}: Error finding container 8a20c5a6059553d3b9f31ffe8bca7339f8a190fe478fa94f6c5308b87b876cc9: Status 404 returned error can't find the container with id 8a20c5a6059553d3b9f31ffe8bca7339f8a190fe478fa94f6c5308b87b876cc9 Dec 02 16:56:00 crc kubenswrapper[4747]: I1202 16:56:00.085894 4747 generic.go:334] "Generic (PLEG): container finished" podID="b34ed569-9800-4521-bbd9-7e4249513755" containerID="f81132a467b065455aa69c43a42c9fb1720f3e02916c6a17d6a13636be02c48f" exitCode=0 Dec 02 16:56:00 crc kubenswrapper[4747]: I1202 16:56:00.086252 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" event={"ID":"b34ed569-9800-4521-bbd9-7e4249513755","Type":"ContainerDied","Data":"f81132a467b065455aa69c43a42c9fb1720f3e02916c6a17d6a13636be02c48f"} Dec 02 16:56:00 crc kubenswrapper[4747]: I1202 16:56:00.088465 4747 generic.go:334] "Generic (PLEG): container finished" podID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerID="acb6628a48da2ac3f46b5c1efa53babd41a5f70ba1c9aae4474f456ffb1061cb" exitCode=0 Dec 02 16:56:00 crc kubenswrapper[4747]: I1202 16:56:00.088565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdxtf" event={"ID":"a0ff08da-7c85-4998-8c5a-12f46a06e892","Type":"ContainerDied","Data":"acb6628a48da2ac3f46b5c1efa53babd41a5f70ba1c9aae4474f456ffb1061cb"} Dec 02 16:56:00 crc kubenswrapper[4747]: I1202 16:56:00.088688 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdxtf" event={"ID":"a0ff08da-7c85-4998-8c5a-12f46a06e892","Type":"ContainerStarted","Data":"8a20c5a6059553d3b9f31ffe8bca7339f8a190fe478fa94f6c5308b87b876cc9"} Dec 02 16:56:01 crc kubenswrapper[4747]: I1202 16:56:01.096602 4747 generic.go:334] "Generic (PLEG): container finished" podID="b34ed569-9800-4521-bbd9-7e4249513755" containerID="83659d7ce9e72d87776710f2f67dcdd70bdbd89d02e7f94107e7a5ed8919fc74" exitCode=0 Dec 02 16:56:01 crc kubenswrapper[4747]: I1202 16:56:01.096650 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" event={"ID":"b34ed569-9800-4521-bbd9-7e4249513755","Type":"ContainerDied","Data":"83659d7ce9e72d87776710f2f67dcdd70bdbd89d02e7f94107e7a5ed8919fc74"} Dec 02 16:56:01 crc kubenswrapper[4747]: I1202 16:56:01.099233 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdxtf" event={"ID":"a0ff08da-7c85-4998-8c5a-12f46a06e892","Type":"ContainerStarted","Data":"e3b2b71d7e27ac3c2ed621dde61c2f6b2283377e44c5b87c424f8fcc5f73b9ac"} Dec 02 16:56:01 crc kubenswrapper[4747]: I1202 16:56:01.795389 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Dec 02 16:56:01 crc kubenswrapper[4747]: I1202 16:56:01.795485 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.110359 4747 generic.go:334] "Generic (PLEG): container finished" podID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerID="e3b2b71d7e27ac3c2ed621dde61c2f6b2283377e44c5b87c424f8fcc5f73b9ac" exitCode=0 Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.110533 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdxtf" event={"ID":"a0ff08da-7c85-4998-8c5a-12f46a06e892","Type":"ContainerDied","Data":"e3b2b71d7e27ac3c2ed621dde61c2f6b2283377e44c5b87c424f8fcc5f73b9ac"} Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.380341 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.461873 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szzpc\" (UniqueName: \"kubernetes.io/projected/b34ed569-9800-4521-bbd9-7e4249513755-kube-api-access-szzpc\") pod \"b34ed569-9800-4521-bbd9-7e4249513755\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.462114 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-util\") pod \"b34ed569-9800-4521-bbd9-7e4249513755\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.463399 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-bundle\") pod \"b34ed569-9800-4521-bbd9-7e4249513755\" (UID: \"b34ed569-9800-4521-bbd9-7e4249513755\") " Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.464093 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-bundle" (OuterVolumeSpecName: "bundle") pod "b34ed569-9800-4521-bbd9-7e4249513755" (UID: "b34ed569-9800-4521-bbd9-7e4249513755"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.469858 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b34ed569-9800-4521-bbd9-7e4249513755-kube-api-access-szzpc" (OuterVolumeSpecName: "kube-api-access-szzpc") pod "b34ed569-9800-4521-bbd9-7e4249513755" (UID: "b34ed569-9800-4521-bbd9-7e4249513755"). InnerVolumeSpecName "kube-api-access-szzpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.478462 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-util" (OuterVolumeSpecName: "util") pod "b34ed569-9800-4521-bbd9-7e4249513755" (UID: "b34ed569-9800-4521-bbd9-7e4249513755"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.565814 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szzpc\" (UniqueName: \"kubernetes.io/projected/b34ed569-9800-4521-bbd9-7e4249513755-kube-api-access-szzpc\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.565848 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-util\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:02 crc kubenswrapper[4747]: I1202 16:56:02.565859 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b34ed569-9800-4521-bbd9-7e4249513755-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:03 crc kubenswrapper[4747]: I1202 16:56:03.120960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" event={"ID":"b34ed569-9800-4521-bbd9-7e4249513755","Type":"ContainerDied","Data":"aefd02373e15d8a6ef51a192c54e22ec17a2e91dfbaeda0988ab26630796d762"} Dec 02 16:56:03 crc kubenswrapper[4747]: I1202 16:56:03.122952 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aefd02373e15d8a6ef51a192c54e22ec17a2e91dfbaeda0988ab26630796d762" Dec 02 16:56:03 crc kubenswrapper[4747]: I1202 16:56:03.120979 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs" Dec 02 16:56:03 crc kubenswrapper[4747]: I1202 16:56:03.123543 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdxtf" event={"ID":"a0ff08da-7c85-4998-8c5a-12f46a06e892","Type":"ContainerStarted","Data":"4844cae4ea09b0a14ea596714b954996d90594483d896af4f9f023f6ea80bea2"} Dec 02 16:56:03 crc kubenswrapper[4747]: I1202 16:56:03.161866 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fdxtf" podStartSLOduration=2.747356724 podStartE2EDuration="5.161834047s" podCreationTimestamp="2025-12-02 16:55:58 +0000 UTC" firstStartedPulling="2025-12-02 16:56:00.090827731 +0000 UTC m=+790.617716480" lastFinishedPulling="2025-12-02 16:56:02.505305054 +0000 UTC m=+793.032193803" observedRunningTime="2025-12-02 16:56:03.151277324 +0000 UTC m=+793.678166113" watchObservedRunningTime="2025-12-02 16:56:03.161834047 +0000 UTC m=+793.688722836" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.125402 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z"] Dec 02 16:56:04 crc kubenswrapper[4747]: E1202 16:56:04.126436 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b34ed569-9800-4521-bbd9-7e4249513755" containerName="extract" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.126460 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b34ed569-9800-4521-bbd9-7e4249513755" containerName="extract" Dec 02 16:56:04 crc kubenswrapper[4747]: E1202 16:56:04.126507 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b34ed569-9800-4521-bbd9-7e4249513755" containerName="pull" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.126519 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b34ed569-9800-4521-bbd9-7e4249513755" containerName="pull" Dec 02 16:56:04 crc 
kubenswrapper[4747]: E1202 16:56:04.126531 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b34ed569-9800-4521-bbd9-7e4249513755" containerName="util" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.126556 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b34ed569-9800-4521-bbd9-7e4249513755" containerName="util" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.126854 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b34ed569-9800-4521-bbd9-7e4249513755" containerName="extract" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.127712 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.139682 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.139946 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-dmxxx" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.140011 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.148606 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z"] Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.190159 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbbnr\" (UniqueName: \"kubernetes.io/projected/b1fa5588-eede-4bb2-8b51-98f2582557bd-kube-api-access-dbbnr\") pod \"nmstate-operator-5b5b58f5c8-8sr7z\" (UID: \"b1fa5588-eede-4bb2-8b51-98f2582557bd\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.291803 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbbnr\" (UniqueName: \"kubernetes.io/projected/b1fa5588-eede-4bb2-8b51-98f2582557bd-kube-api-access-dbbnr\") pod \"nmstate-operator-5b5b58f5c8-8sr7z\" (UID: \"b1fa5588-eede-4bb2-8b51-98f2582557bd\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.313283 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbbnr\" (UniqueName: \"kubernetes.io/projected/b1fa5588-eede-4bb2-8b51-98f2582557bd-kube-api-access-dbbnr\") pod \"nmstate-operator-5b5b58f5c8-8sr7z\" (UID: \"b1fa5588-eede-4bb2-8b51-98f2582557bd\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.453326 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" Dec 02 16:56:04 crc kubenswrapper[4747]: I1202 16:56:04.741702 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z"] Dec 02 16:56:05 crc kubenswrapper[4747]: I1202 16:56:05.138182 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" event={"ID":"b1fa5588-eede-4bb2-8b51-98f2582557bd","Type":"ContainerStarted","Data":"28ac928830cdcde5390988a6bbf08d07bb0b2d57932b9a0969ecd579f02f202d"} Dec 02 16:56:08 crc kubenswrapper[4747]: I1202 16:56:08.156412 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" event={"ID":"b1fa5588-eede-4bb2-8b51-98f2582557bd","Type":"ContainerStarted","Data":"7763868ad0227fd08ca0078e28eab5f79243c7817644330b46e9775471fc27ae"} Dec 02 16:56:08 crc kubenswrapper[4747]: I1202 16:56:08.175059 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-8sr7z" podStartSLOduration=1.927790333 podStartE2EDuration="4.175027255s" podCreationTimestamp="2025-12-02 16:56:04 +0000 UTC" firstStartedPulling="2025-12-02 16:56:04.749777266 +0000 UTC m=+795.276666025" lastFinishedPulling="2025-12-02 16:56:06.997014188 +0000 UTC m=+797.523902947" observedRunningTime="2025-12-02 16:56:08.171229866 +0000 UTC m=+798.698118665" watchObservedRunningTime="2025-12-02 16:56:08.175027255 +0000 UTC m=+798.701916024" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.186346 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-f556t"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.191811 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.196701 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-ttrk5" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.196756 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.198764 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.203237 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.219575 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.230840 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-f556t"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.247522 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-pj45b"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.248594 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.252916 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.253528 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.276146 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chbcd\" (UniqueName: \"kubernetes.io/projected/cb597834-dde7-4bcb-b267-22aae223b524-kube-api-access-chbcd\") pod \"nmstate-metrics-7f946cbc9-f556t\" (UID: \"cb597834-dde7-4bcb-b267-22aae223b524\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.276264 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-nmstate-lock\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.276298 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-dbus-socket\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.276345 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6659cefd-449d-4eb0-a02e-d118b586ebac-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-lxrq8\" (UID: \"6659cefd-449d-4eb0-a02e-d118b586ebac\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.276572 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkwpl\" (UniqueName: \"kubernetes.io/projected/00a29ad2-51bd-409c-afc3-ccd42b113c68-kube-api-access-vkwpl\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.276681 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-ovs-socket\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.276826 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnnsf\" (UniqueName: \"kubernetes.io/projected/6659cefd-449d-4eb0-a02e-d118b586ebac-kube-api-access-jnnsf\") pod \"nmstate-webhook-5f6d4c5ccb-lxrq8\" (UID: \"6659cefd-449d-4eb0-a02e-d118b586ebac\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.306030 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:56:09 crc 
kubenswrapper[4747]: I1202 16:56:09.356616 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.360139 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.364507 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-p8qms" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.364792 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.365031 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.378937 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-ovs-socket\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379009 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2b678030-cb69-4a68-ab2f-140d36283f19-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379065 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnnsf\" (UniqueName: \"kubernetes.io/projected/6659cefd-449d-4eb0-a02e-d118b586ebac-kube-api-access-jnnsf\") pod \"nmstate-webhook-5f6d4c5ccb-lxrq8\" (UID: \"6659cefd-449d-4eb0-a02e-d118b586ebac\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379107 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chbcd\" (UniqueName: \"kubernetes.io/projected/cb597834-dde7-4bcb-b267-22aae223b524-kube-api-access-chbcd\") pod \"nmstate-metrics-7f946cbc9-f556t\" (UID: \"cb597834-dde7-4bcb-b267-22aae223b524\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379109 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-ovs-socket\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379145 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-nmstate-lock\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-dbus-socket\") pod \"nmstate-handler-pj45b\" (UID: 
\"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379268 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b678030-cb69-4a68-ab2f-140d36283f19-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379304 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q484h\" (UniqueName: \"kubernetes.io/projected/2b678030-cb69-4a68-ab2f-140d36283f19-kube-api-access-q484h\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379349 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6659cefd-449d-4eb0-a02e-d118b586ebac-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-lxrq8\" (UID: \"6659cefd-449d-4eb0-a02e-d118b586ebac\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.379391 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkwpl\" (UniqueName: \"kubernetes.io/projected/00a29ad2-51bd-409c-afc3-ccd42b113c68-kube-api-access-vkwpl\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: E1202 16:56:09.380001 4747 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.380042 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-dbus-socket\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: E1202 16:56:09.380064 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6659cefd-449d-4eb0-a02e-d118b586ebac-tls-key-pair podName:6659cefd-449d-4eb0-a02e-d118b586ebac nodeName:}" failed. No retries permitted until 2025-12-02 16:56:09.880040378 +0000 UTC m=+800.406929137 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/6659cefd-449d-4eb0-a02e-d118b586ebac-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-lxrq8" (UID: "6659cefd-449d-4eb0-a02e-d118b586ebac") : secret "openshift-nmstate-webhook" not found Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.380520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/00a29ad2-51bd-409c-afc3-ccd42b113c68-nmstate-lock\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.387004 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.400592 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chbcd\" (UniqueName: \"kubernetes.io/projected/cb597834-dde7-4bcb-b267-22aae223b524-kube-api-access-chbcd\") pod \"nmstate-metrics-7f946cbc9-f556t\" (UID: \"cb597834-dde7-4bcb-b267-22aae223b524\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.400828 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnnsf\" (UniqueName: \"kubernetes.io/projected/6659cefd-449d-4eb0-a02e-d118b586ebac-kube-api-access-jnnsf\") pod \"nmstate-webhook-5f6d4c5ccb-lxrq8\" (UID: \"6659cefd-449d-4eb0-a02e-d118b586ebac\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.401226 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkwpl\" (UniqueName: \"kubernetes.io/projected/00a29ad2-51bd-409c-afc3-ccd42b113c68-kube-api-access-vkwpl\") pod \"nmstate-handler-pj45b\" (UID: \"00a29ad2-51bd-409c-afc3-ccd42b113c68\") " pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.480689 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b678030-cb69-4a68-ab2f-140d36283f19-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.480763 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q484h\" (UniqueName: \"kubernetes.io/projected/2b678030-cb69-4a68-ab2f-140d36283f19-kube-api-access-q484h\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.480849 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2b678030-cb69-4a68-ab2f-140d36283f19-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: E1202 16:56:09.480880 4747 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 02 16:56:09 crc kubenswrapper[4747]: E1202 16:56:09.480976 
4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b678030-cb69-4a68-ab2f-140d36283f19-plugin-serving-cert podName:2b678030-cb69-4a68-ab2f-140d36283f19 nodeName:}" failed. No retries permitted until 2025-12-02 16:56:09.980954606 +0000 UTC m=+800.507843355 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/2b678030-cb69-4a68-ab2f-140d36283f19-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-c7bcn" (UID: "2b678030-cb69-4a68-ab2f-140d36283f19") : secret "plugin-serving-cert" not found Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.482085 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/2b678030-cb69-4a68-ab2f-140d36283f19-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.505816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q484h\" (UniqueName: \"kubernetes.io/projected/2b678030-cb69-4a68-ab2f-140d36283f19-kube-api-access-q484h\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.534585 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.537645 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7fb6788756-h5cw6"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.538537 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.561877 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7fb6788756-h5cw6"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.573487 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.595818 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-oauth-serving-cert\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.596020 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/77787867-9cd8-400f-9070-4fe42985b265-console-serving-cert\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.596044 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-console-config\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.596079 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/77787867-9cd8-400f-9070-4fe42985b265-console-oauth-config\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.596110 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-service-ca\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.596145 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk6nm\" (UniqueName: \"kubernetes.io/projected/77787867-9cd8-400f-9070-4fe42985b265-kube-api-access-jk6nm\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.596211 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-trusted-ca-bundle\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: W1202 16:56:09.607145 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00a29ad2_51bd_409c_afc3_ccd42b113c68.slice/crio-457664873abd7a4f719c29eeb2be8209b73b47a3f81099b43bd3405574a54066 WatchSource:0}: Error finding container 457664873abd7a4f719c29eeb2be8209b73b47a3f81099b43bd3405574a54066: Status 404 returned error can't find the container with id 457664873abd7a4f719c29eeb2be8209b73b47a3f81099b43bd3405574a54066 Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 
16:56:09.697658 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/77787867-9cd8-400f-9070-4fe42985b265-console-serving-cert\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.697712 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-console-config\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.697746 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/77787867-9cd8-400f-9070-4fe42985b265-console-oauth-config\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.697776 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-service-ca\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.697804 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk6nm\" (UniqueName: \"kubernetes.io/projected/77787867-9cd8-400f-9070-4fe42985b265-kube-api-access-jk6nm\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.697847 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-trusted-ca-bundle\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.697939 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-oauth-serving-cert\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.699261 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-oauth-serving-cert\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.700225 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-service-ca\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.700716 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-console-config\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.700851 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77787867-9cd8-400f-9070-4fe42985b265-trusted-ca-bundle\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.711809 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/77787867-9cd8-400f-9070-4fe42985b265-console-oauth-config\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.712045 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/77787867-9cd8-400f-9070-4fe42985b265-console-serving-cert\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.719844 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk6nm\" (UniqueName: \"kubernetes.io/projected/77787867-9cd8-400f-9070-4fe42985b265-kube-api-access-jk6nm\") pod \"console-7fb6788756-h5cw6\" (UID: \"77787867-9cd8-400f-9070-4fe42985b265\") " pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.783555 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-f556t"] Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.895501 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.900443 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6659cefd-449d-4eb0-a02e-d118b586ebac-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-lxrq8\" (UID: \"6659cefd-449d-4eb0-a02e-d118b586ebac\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:09 crc kubenswrapper[4747]: I1202 16:56:09.904311 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/6659cefd-449d-4eb0-a02e-d118b586ebac-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-lxrq8\" (UID: \"6659cefd-449d-4eb0-a02e-d118b586ebac\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.002189 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b678030-cb69-4a68-ab2f-140d36283f19-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.008508 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b678030-cb69-4a68-ab2f-140d36283f19-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-c7bcn\" (UID: \"2b678030-cb69-4a68-ab2f-140d36283f19\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.090994 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7fb6788756-h5cw6"] Dec 02 16:56:10 crc kubenswrapper[4747]: W1202 16:56:10.096797 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77787867_9cd8_400f_9070_4fe42985b265.slice/crio-bd4ba92dcfd0d0e48d04cab84e54275b10bf079050f1d759daed876c09b54e5e WatchSource:0}: Error finding container bd4ba92dcfd0d0e48d04cab84e54275b10bf079050f1d759daed876c09b54e5e: Status 404 returned error can't find the container with id bd4ba92dcfd0d0e48d04cab84e54275b10bf079050f1d759daed876c09b54e5e Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.147274 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.169458 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-pj45b" event={"ID":"00a29ad2-51bd-409c-afc3-ccd42b113c68","Type":"ContainerStarted","Data":"457664873abd7a4f719c29eeb2be8209b73b47a3f81099b43bd3405574a54066"} Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.170929 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" event={"ID":"cb597834-dde7-4bcb-b267-22aae223b524","Type":"ContainerStarted","Data":"d6c715ca9e53ad2591f96cf9ba1b7f20a5cc1b97e8fbcddfa078d2e8a1431b6b"} Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.173727 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7fb6788756-h5cw6" event={"ID":"77787867-9cd8-400f-9070-4fe42985b265","Type":"ContainerStarted","Data":"bd4ba92dcfd0d0e48d04cab84e54275b10bf079050f1d759daed876c09b54e5e"} Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.237337 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.290518 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.510079 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8"] Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.548302 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn"] Dec 02 16:56:10 crc kubenswrapper[4747]: W1202 16:56:10.554045 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b678030_cb69_4a68_ab2f_140d36283f19.slice/crio-54175048f7607a8884ae4bab996059ef10ebec0e2dd556f4a53308456d09e345 WatchSource:0}: Error finding container 54175048f7607a8884ae4bab996059ef10ebec0e2dd556f4a53308456d09e345: Status 404 returned error can't find the container with id 54175048f7607a8884ae4bab996059ef10ebec0e2dd556f4a53308456d09e345 Dec 02 16:56:10 crc kubenswrapper[4747]: I1202 16:56:10.905562 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fdxtf"] Dec 02 16:56:11 crc kubenswrapper[4747]: I1202 16:56:11.180438 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" event={"ID":"6659cefd-449d-4eb0-a02e-d118b586ebac","Type":"ContainerStarted","Data":"6b7a75bdc42fbdcffa5791f61edd1511ed69505396f574e5c24a39d35bd47be3"} Dec 02 16:56:11 crc kubenswrapper[4747]: I1202 16:56:11.182271 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" event={"ID":"2b678030-cb69-4a68-ab2f-140d36283f19","Type":"ContainerStarted","Data":"54175048f7607a8884ae4bab996059ef10ebec0e2dd556f4a53308456d09e345"} Dec 02 16:56:11 crc kubenswrapper[4747]: I1202 16:56:11.183834 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7fb6788756-h5cw6" event={"ID":"77787867-9cd8-400f-9070-4fe42985b265","Type":"ContainerStarted","Data":"08bd3111490f93ec4cae14710159c8e8f5f9296cfa2e74876fd2d2a39f5246cf"} Dec 02 16:56:11 crc kubenswrapper[4747]: I1202 16:56:11.205526 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7fb6788756-h5cw6" podStartSLOduration=2.205498827 podStartE2EDuration="2.205498827s" podCreationTimestamp="2025-12-02 16:56:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:56:11.203807039 +0000 UTC m=+801.730695788" watchObservedRunningTime="2025-12-02 16:56:11.205498827 +0000 UTC m=+801.732387576" Dec 02 16:56:12 crc kubenswrapper[4747]: I1202 16:56:12.190610 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fdxtf" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="registry-server" containerID="cri-o://4844cae4ea09b0a14ea596714b954996d90594483d896af4f9f023f6ea80bea2" gracePeriod=2 Dec 02 16:56:13 crc kubenswrapper[4747]: I1202 16:56:13.200424 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" event={"ID":"6659cefd-449d-4eb0-a02e-d118b586ebac","Type":"ContainerStarted","Data":"6d97196e708d6b5c56b90aebd8ee74576bb3bd93ed7aaee2605385d6355371e6"} Dec 02 16:56:13 crc kubenswrapper[4747]: I1202 16:56:13.201234 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:13 crc kubenswrapper[4747]: I1202 16:56:13.202422 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" event={"ID":"cb597834-dde7-4bcb-b267-22aae223b524","Type":"ContainerStarted","Data":"07ed64658dcfa42bd4760a88ab9345e2f087dfffdb595e27971421e47216030d"} Dec 02 16:56:13 crc kubenswrapper[4747]: I1202 16:56:13.204325 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-pj45b" event={"ID":"00a29ad2-51bd-409c-afc3-ccd42b113c68","Type":"ContainerStarted","Data":"352f3e22f8f43b950f79583d3ba53568cff25e16aa1e01b26b0efeeb308928b8"} Dec 02 16:56:13 crc kubenswrapper[4747]: I1202 16:56:13.204561 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:13 crc kubenswrapper[4747]: I1202 16:56:13.233141 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" podStartSLOduration=2.541785265 podStartE2EDuration="4.233112802s" podCreationTimestamp="2025-12-02 16:56:09 +0000 UTC" firstStartedPulling="2025-12-02 16:56:10.504456627 +0000 UTC m=+801.031345376" lastFinishedPulling="2025-12-02 16:56:12.195784164 +0000 UTC m=+802.722672913" observedRunningTime="2025-12-02 16:56:13.223724333 +0000 UTC m=+803.750613102" watchObservedRunningTime="2025-12-02 16:56:13.233112802 +0000 UTC m=+803.760001571" Dec 02 16:56:13 crc kubenswrapper[4747]: I1202 16:56:13.250083 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-pj45b" podStartSLOduration=1.666524111 podStartE2EDuration="4.250057429s" podCreationTimestamp="2025-12-02 16:56:09 +0000 UTC" firstStartedPulling="2025-12-02 16:56:09.611197266 +0000 UTC m=+800.138086015" lastFinishedPulling="2025-12-02 16:56:12.194730584 +0000 UTC m=+802.721619333" observedRunningTime="2025-12-02 16:56:13.249161673 +0000 UTC m=+803.776050432" watchObservedRunningTime="2025-12-02 16:56:13.250057429 +0000 UTC m=+803.776946178" Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.213602 4747 generic.go:334] 
"Generic (PLEG): container finished" podID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerID="4844cae4ea09b0a14ea596714b954996d90594483d896af4f9f023f6ea80bea2" exitCode=0 Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.213671 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdxtf" event={"ID":"a0ff08da-7c85-4998-8c5a-12f46a06e892","Type":"ContainerDied","Data":"4844cae4ea09b0a14ea596714b954996d90594483d896af4f9f023f6ea80bea2"} Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.580416 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.700978 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4trvc\" (UniqueName: \"kubernetes.io/projected/a0ff08da-7c85-4998-8c5a-12f46a06e892-kube-api-access-4trvc\") pod \"a0ff08da-7c85-4998-8c5a-12f46a06e892\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.701090 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-catalog-content\") pod \"a0ff08da-7c85-4998-8c5a-12f46a06e892\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.701147 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-utilities\") pod \"a0ff08da-7c85-4998-8c5a-12f46a06e892\" (UID: \"a0ff08da-7c85-4998-8c5a-12f46a06e892\") " Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.702036 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-utilities" (OuterVolumeSpecName: "utilities") pod "a0ff08da-7c85-4998-8c5a-12f46a06e892" (UID: "a0ff08da-7c85-4998-8c5a-12f46a06e892"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.705096 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0ff08da-7c85-4998-8c5a-12f46a06e892-kube-api-access-4trvc" (OuterVolumeSpecName: "kube-api-access-4trvc") pod "a0ff08da-7c85-4998-8c5a-12f46a06e892" (UID: "a0ff08da-7c85-4998-8c5a-12f46a06e892"). InnerVolumeSpecName "kube-api-access-4trvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.803014 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4trvc\" (UniqueName: \"kubernetes.io/projected/a0ff08da-7c85-4998-8c5a-12f46a06e892-kube-api-access-4trvc\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.803563 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.813790 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0ff08da-7c85-4998-8c5a-12f46a06e892" (UID: "a0ff08da-7c85-4998-8c5a-12f46a06e892"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:56:14 crc kubenswrapper[4747]: I1202 16:56:14.905368 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0ff08da-7c85-4998-8c5a-12f46a06e892-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.228293 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" event={"ID":"2b678030-cb69-4a68-ab2f-140d36283f19","Type":"ContainerStarted","Data":"3f34939fda3bef79cbbd11a3ae827b51deef774de17801df93d08344dfc41d61"} Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.232607 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fdxtf" event={"ID":"a0ff08da-7c85-4998-8c5a-12f46a06e892","Type":"ContainerDied","Data":"8a20c5a6059553d3b9f31ffe8bca7339f8a190fe478fa94f6c5308b87b876cc9"} Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.232691 4747 scope.go:117] "RemoveContainer" containerID="4844cae4ea09b0a14ea596714b954996d90594483d896af4f9f023f6ea80bea2" Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.232709 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fdxtf" Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.253336 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-c7bcn" podStartSLOduration=2.19582979 podStartE2EDuration="6.253311673s" podCreationTimestamp="2025-12-02 16:56:09 +0000 UTC" firstStartedPulling="2025-12-02 16:56:10.556319946 +0000 UTC m=+801.083208695" lastFinishedPulling="2025-12-02 16:56:14.613801829 +0000 UTC m=+805.140690578" observedRunningTime="2025-12-02 16:56:15.249949557 +0000 UTC m=+805.776838346" watchObservedRunningTime="2025-12-02 16:56:15.253311673 +0000 UTC m=+805.780200422" Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.257472 4747 scope.go:117] "RemoveContainer" containerID="e3b2b71d7e27ac3c2ed621dde61c2f6b2283377e44c5b87c424f8fcc5f73b9ac" Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.275802 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fdxtf"] Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.281010 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fdxtf"] Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.292547 4747 scope.go:117] "RemoveContainer" containerID="acb6628a48da2ac3f46b5c1efa53babd41a5f70ba1c9aae4474f456ffb1061cb" Dec 02 16:56:15 crc kubenswrapper[4747]: I1202 16:56:15.772576 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" path="/var/lib/kubelet/pods/a0ff08da-7c85-4998-8c5a-12f46a06e892/volumes" Dec 02 16:56:18 crc kubenswrapper[4747]: I1202 16:56:18.260800 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" event={"ID":"cb597834-dde7-4bcb-b267-22aae223b524","Type":"ContainerStarted","Data":"dee517a8e4073c5075b7db7d844e8622e9b6988748684bada718c71ea79c485c"} Dec 02 16:56:18 crc kubenswrapper[4747]: I1202 16:56:18.289287 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-f556t" podStartSLOduration=1.979332242 
podStartE2EDuration="9.289260352s" podCreationTimestamp="2025-12-02 16:56:09 +0000 UTC" firstStartedPulling="2025-12-02 16:56:09.793994395 +0000 UTC m=+800.320883144" lastFinishedPulling="2025-12-02 16:56:17.103922505 +0000 UTC m=+807.630811254" observedRunningTime="2025-12-02 16:56:18.283075075 +0000 UTC m=+808.809963854" watchObservedRunningTime="2025-12-02 16:56:18.289260352 +0000 UTC m=+808.816149111" Dec 02 16:56:19 crc kubenswrapper[4747]: I1202 16:56:19.607961 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-pj45b" Dec 02 16:56:19 crc kubenswrapper[4747]: I1202 16:56:19.896028 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:19 crc kubenswrapper[4747]: I1202 16:56:19.896118 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:19 crc kubenswrapper[4747]: I1202 16:56:19.903641 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:20 crc kubenswrapper[4747]: I1202 16:56:20.280028 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7fb6788756-h5cw6" Dec 02 16:56:20 crc kubenswrapper[4747]: I1202 16:56:20.334508 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8w6hw"] Dec 02 16:56:30 crc kubenswrapper[4747]: I1202 16:56:30.160061 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lxrq8" Dec 02 16:56:31 crc kubenswrapper[4747]: I1202 16:56:31.795471 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:56:31 crc kubenswrapper[4747]: I1202 16:56:31.795588 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.938119 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd"] Dec 02 16:56:43 crc kubenswrapper[4747]: E1202 16:56:43.939122 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="extract-utilities" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.939137 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="extract-utilities" Dec 02 16:56:43 crc kubenswrapper[4747]: E1202 16:56:43.939156 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="registry-server" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.939162 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="registry-server" Dec 02 16:56:43 crc kubenswrapper[4747]: E1202 16:56:43.939178 4747 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="extract-content" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.939186 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="extract-content" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.939340 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0ff08da-7c85-4998-8c5a-12f46a06e892" containerName="registry-server" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.940341 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.943431 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 16:56:43 crc kubenswrapper[4747]: I1202 16:56:43.956721 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd"] Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.013564 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.013659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhq7l\" (UniqueName: \"kubernetes.io/projected/0309c5e1-1f38-47c3-adc5-553675c969a8-kube-api-access-bhq7l\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.013688 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.114660 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.114754 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.114814 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhq7l\" (UniqueName: \"kubernetes.io/projected/0309c5e1-1f38-47c3-adc5-553675c969a8-kube-api-access-bhq7l\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.115228 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.115375 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.136138 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhq7l\" (UniqueName: \"kubernetes.io/projected/0309c5e1-1f38-47c3-adc5-553675c969a8-kube-api-access-bhq7l\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.262486 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:44 crc kubenswrapper[4747]: I1202 16:56:44.731448 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd"] Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.387265 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-8w6hw" podUID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" containerName="console" containerID="cri-o://6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09" gracePeriod=15 Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.470086 4747 generic.go:334] "Generic (PLEG): container finished" podID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerID="8112ec11cc0f55ac84c21490a506fa70282497d169156d90589995df61c3f122" exitCode=0 Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.470160 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" event={"ID":"0309c5e1-1f38-47c3-adc5-553675c969a8","Type":"ContainerDied","Data":"8112ec11cc0f55ac84c21490a506fa70282497d169156d90589995df61c3f122"} Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.470210 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" event={"ID":"0309c5e1-1f38-47c3-adc5-553675c969a8","Type":"ContainerStarted","Data":"2be60ecc40589f837c50c4d987e8b7edb51fef4fcb7e72458d06fcabcbc26156"} Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.729487 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8w6hw_fd81d86e-f692-4a5d-885f-9c37fa608b4f/console/0.log" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.729956 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.840303 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-oauth-serving-cert\") pod \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.840373 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-oauth-config\") pod \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.840438 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-service-ca\") pod \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.840504 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-trusted-ca-bundle\") pod \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.840535 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-config\") pod \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.840595 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lmt4\" (UniqueName: \"kubernetes.io/projected/fd81d86e-f692-4a5d-885f-9c37fa608b4f-kube-api-access-2lmt4\") pod \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.840620 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-serving-cert\") pod \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\" (UID: \"fd81d86e-f692-4a5d-885f-9c37fa608b4f\") " Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.841269 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-service-ca" (OuterVolumeSpecName: "service-ca") pod "fd81d86e-f692-4a5d-885f-9c37fa608b4f" (UID: "fd81d86e-f692-4a5d-885f-9c37fa608b4f"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.841263 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "fd81d86e-f692-4a5d-885f-9c37fa608b4f" (UID: "fd81d86e-f692-4a5d-885f-9c37fa608b4f"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.841283 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "fd81d86e-f692-4a5d-885f-9c37fa608b4f" (UID: "fd81d86e-f692-4a5d-885f-9c37fa608b4f"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.841629 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-config" (OuterVolumeSpecName: "console-config") pod "fd81d86e-f692-4a5d-885f-9c37fa608b4f" (UID: "fd81d86e-f692-4a5d-885f-9c37fa608b4f"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.848205 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd81d86e-f692-4a5d-885f-9c37fa608b4f-kube-api-access-2lmt4" (OuterVolumeSpecName: "kube-api-access-2lmt4") pod "fd81d86e-f692-4a5d-885f-9c37fa608b4f" (UID: "fd81d86e-f692-4a5d-885f-9c37fa608b4f"). InnerVolumeSpecName "kube-api-access-2lmt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.848919 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "fd81d86e-f692-4a5d-885f-9c37fa608b4f" (UID: "fd81d86e-f692-4a5d-885f-9c37fa608b4f"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.853334 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "fd81d86e-f692-4a5d-885f-9c37fa608b4f" (UID: "fd81d86e-f692-4a5d-885f-9c37fa608b4f"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.942947 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.942996 4747 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.943005 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lmt4\" (UniqueName: \"kubernetes.io/projected/fd81d86e-f692-4a5d-885f-9c37fa608b4f-kube-api-access-2lmt4\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.943018 4747 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.943027 4747 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.943035 4747 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fd81d86e-f692-4a5d-885f-9c37fa608b4f-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:45 crc kubenswrapper[4747]: I1202 16:56:45.943045 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fd81d86e-f692-4a5d-885f-9c37fa608b4f-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.478404 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-8w6hw_fd81d86e-f692-4a5d-885f-9c37fa608b4f/console/0.log" Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.478471 4747 generic.go:334] "Generic (PLEG): container finished" podID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" containerID="6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09" exitCode=2 Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.478518 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8w6hw" event={"ID":"fd81d86e-f692-4a5d-885f-9c37fa608b4f","Type":"ContainerDied","Data":"6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09"} Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.478564 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-8w6hw" event={"ID":"fd81d86e-f692-4a5d-885f-9c37fa608b4f","Type":"ContainerDied","Data":"a89bac04ade08d36ce9cbdd3f06311a5b912657e1d2be08bc1d0def204601206"} Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.478590 4747 scope.go:117] "RemoveContainer" containerID="6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09" Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.478631 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-8w6hw" Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.498915 4747 scope.go:117] "RemoveContainer" containerID="6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09" Dec 02 16:56:46 crc kubenswrapper[4747]: E1202 16:56:46.499492 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09\": container with ID starting with 6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09 not found: ID does not exist" containerID="6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09" Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.499546 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09"} err="failed to get container status \"6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09\": rpc error: code = NotFound desc = could not find container \"6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09\": container with ID starting with 6b33a4f877f904bf8f65afe953b16774583114421f9d95c00c10f1adf4db4d09 not found: ID does not exist" Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.525604 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-8w6hw"] Dec 02 16:56:46 crc kubenswrapper[4747]: I1202 16:56:46.528959 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-8w6hw"] Dec 02 16:56:47 crc kubenswrapper[4747]: I1202 16:56:47.496266 4747 generic.go:334] "Generic (PLEG): container finished" podID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerID="2bfaaf77ab8c02cf797e28571ea0d28421d0cac2eefe52641a8346ab23297028" exitCode=0 Dec 02 16:56:47 crc kubenswrapper[4747]: I1202 16:56:47.496406 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" event={"ID":"0309c5e1-1f38-47c3-adc5-553675c969a8","Type":"ContainerDied","Data":"2bfaaf77ab8c02cf797e28571ea0d28421d0cac2eefe52641a8346ab23297028"} Dec 02 16:56:47 crc kubenswrapper[4747]: I1202 16:56:47.772156 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" path="/var/lib/kubelet/pods/fd81d86e-f692-4a5d-885f-9c37fa608b4f/volumes" Dec 02 16:56:48 crc kubenswrapper[4747]: I1202 16:56:48.510768 4747 generic.go:334] "Generic (PLEG): container finished" podID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerID="498983cf21027aed72402fea5d196f49940d1145123254641bc2adc695bff60b" exitCode=0 Dec 02 16:56:48 crc kubenswrapper[4747]: I1202 16:56:48.510936 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" event={"ID":"0309c5e1-1f38-47c3-adc5-553675c969a8","Type":"ContainerDied","Data":"498983cf21027aed72402fea5d196f49940d1145123254641bc2adc695bff60b"} Dec 02 16:56:49 crc kubenswrapper[4747]: I1202 16:56:49.792407 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:49 crc kubenswrapper[4747]: I1202 16:56:49.903377 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-util\") pod \"0309c5e1-1f38-47c3-adc5-553675c969a8\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " Dec 02 16:56:49 crc kubenswrapper[4747]: I1202 16:56:49.903458 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhq7l\" (UniqueName: \"kubernetes.io/projected/0309c5e1-1f38-47c3-adc5-553675c969a8-kube-api-access-bhq7l\") pod \"0309c5e1-1f38-47c3-adc5-553675c969a8\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " Dec 02 16:56:49 crc kubenswrapper[4747]: I1202 16:56:49.903622 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-bundle\") pod \"0309c5e1-1f38-47c3-adc5-553675c969a8\" (UID: \"0309c5e1-1f38-47c3-adc5-553675c969a8\") " Dec 02 16:56:49 crc kubenswrapper[4747]: I1202 16:56:49.904635 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-bundle" (OuterVolumeSpecName: "bundle") pod "0309c5e1-1f38-47c3-adc5-553675c969a8" (UID: "0309c5e1-1f38-47c3-adc5-553675c969a8"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:56:49 crc kubenswrapper[4747]: I1202 16:56:49.911238 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0309c5e1-1f38-47c3-adc5-553675c969a8-kube-api-access-bhq7l" (OuterVolumeSpecName: "kube-api-access-bhq7l") pod "0309c5e1-1f38-47c3-adc5-553675c969a8" (UID: "0309c5e1-1f38-47c3-adc5-553675c969a8"). InnerVolumeSpecName "kube-api-access-bhq7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:56:49 crc kubenswrapper[4747]: I1202 16:56:49.919352 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-util" (OuterVolumeSpecName: "util") pod "0309c5e1-1f38-47c3-adc5-553675c969a8" (UID: "0309c5e1-1f38-47c3-adc5-553675c969a8"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:56:50 crc kubenswrapper[4747]: I1202 16:56:50.005698 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-util\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:50 crc kubenswrapper[4747]: I1202 16:56:50.005745 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhq7l\" (UniqueName: \"kubernetes.io/projected/0309c5e1-1f38-47c3-adc5-553675c969a8-kube-api-access-bhq7l\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:50 crc kubenswrapper[4747]: I1202 16:56:50.005780 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0309c5e1-1f38-47c3-adc5-553675c969a8-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:56:50 crc kubenswrapper[4747]: I1202 16:56:50.528793 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" event={"ID":"0309c5e1-1f38-47c3-adc5-553675c969a8","Type":"ContainerDied","Data":"2be60ecc40589f837c50c4d987e8b7edb51fef4fcb7e72458d06fcabcbc26156"} Dec 02 16:56:50 crc kubenswrapper[4747]: I1202 16:56:50.528849 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2be60ecc40589f837c50c4d987e8b7edb51fef4fcb7e72458d06fcabcbc26156" Dec 02 16:56:50 crc kubenswrapper[4747]: I1202 16:56:50.528937 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.249337 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4"] Dec 02 16:56:59 crc kubenswrapper[4747]: E1202 16:56:59.250538 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerName="extract" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.250556 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerName="extract" Dec 02 16:56:59 crc kubenswrapper[4747]: E1202 16:56:59.250573 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" containerName="console" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.250581 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" containerName="console" Dec 02 16:56:59 crc kubenswrapper[4747]: E1202 16:56:59.250594 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerName="pull" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.250600 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerName="pull" Dec 02 16:56:59 crc kubenswrapper[4747]: E1202 16:56:59.250610 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerName="util" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.250627 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerName="util" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.250738 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0309c5e1-1f38-47c3-adc5-553675c969a8" containerName="extract" Dec 02 
16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.250751 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd81d86e-f692-4a5d-885f-9c37fa608b4f" containerName="console" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.251320 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.254199 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.254497 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.255418 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.257235 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-jh9vz" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.258389 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.288300 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4"] Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.439219 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-apiservice-cert\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.439305 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-webhook-cert\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.439344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rml9p\" (UniqueName: \"kubernetes.io/projected/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-kube-api-access-rml9p\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.540511 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-apiservice-cert\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.542461 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-webhook-cert\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.542488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rml9p\" (UniqueName: \"kubernetes.io/projected/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-kube-api-access-rml9p\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.550184 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-apiservice-cert\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.550876 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-webhook-cert\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.567473 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rml9p\" (UniqueName: \"kubernetes.io/projected/5e16dbd4-9db6-47d1-bfc3-549dbb8067e9-kube-api-access-rml9p\") pod \"metallb-operator-controller-manager-98db7dd6f-hftd4\" (UID: \"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9\") " pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.571287 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.650488 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm"] Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.658556 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.659369 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm"] Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.661664 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.662170 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-zx987" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.662421 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.746035 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c15d13db-952d-4732-9d31-9ba5e926796c-apiservice-cert\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.746179 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c15d13db-952d-4732-9d31-9ba5e926796c-webhook-cert\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.746211 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tlxh\" (UniqueName: \"kubernetes.io/projected/c15d13db-952d-4732-9d31-9ba5e926796c-kube-api-access-7tlxh\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.848147 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c15d13db-952d-4732-9d31-9ba5e926796c-apiservice-cert\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.848309 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c15d13db-952d-4732-9d31-9ba5e926796c-webhook-cert\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.848333 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tlxh\" (UniqueName: \"kubernetes.io/projected/c15d13db-952d-4732-9d31-9ba5e926796c-kube-api-access-7tlxh\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 
16:56:59.867460 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c15d13db-952d-4732-9d31-9ba5e926796c-webhook-cert\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.879438 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c15d13db-952d-4732-9d31-9ba5e926796c-apiservice-cert\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.879870 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tlxh\" (UniqueName: \"kubernetes.io/projected/c15d13db-952d-4732-9d31-9ba5e926796c-kube-api-access-7tlxh\") pod \"metallb-operator-webhook-server-5448bdf48d-p2qrm\" (UID: \"c15d13db-952d-4732-9d31-9ba5e926796c\") " pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:56:59 crc kubenswrapper[4747]: I1202 16:56:59.980328 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:57:00 crc kubenswrapper[4747]: I1202 16:57:00.202662 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4"] Dec 02 16:57:00 crc kubenswrapper[4747]: I1202 16:57:00.265782 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm"] Dec 02 16:57:00 crc kubenswrapper[4747]: W1202 16:57:00.272341 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc15d13db_952d_4732_9d31_9ba5e926796c.slice/crio-2d93355d4fc9c5f09ec3a98e88525662ff64728dd2e6111842ec69853b080cad WatchSource:0}: Error finding container 2d93355d4fc9c5f09ec3a98e88525662ff64728dd2e6111842ec69853b080cad: Status 404 returned error can't find the container with id 2d93355d4fc9c5f09ec3a98e88525662ff64728dd2e6111842ec69853b080cad Dec 02 16:57:00 crc kubenswrapper[4747]: I1202 16:57:00.615665 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" event={"ID":"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9","Type":"ContainerStarted","Data":"e7450c6a3bbe698968dedb09a656ac88af3521f62d6106e160e0d0a93db249cb"} Dec 02 16:57:00 crc kubenswrapper[4747]: I1202 16:57:00.617824 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" event={"ID":"c15d13db-952d-4732-9d31-9ba5e926796c","Type":"ContainerStarted","Data":"2d93355d4fc9c5f09ec3a98e88525662ff64728dd2e6111842ec69853b080cad"} Dec 02 16:57:01 crc kubenswrapper[4747]: I1202 16:57:01.794961 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:57:01 crc kubenswrapper[4747]: I1202 16:57:01.795052 4747 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:57:01 crc kubenswrapper[4747]: I1202 16:57:01.795120 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 16:57:01 crc kubenswrapper[4747]: I1202 16:57:01.796007 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5bac94c471a07d1efa3cacb30b33cf6d0f493cee57eaf241d36618d258608c30"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 16:57:01 crc kubenswrapper[4747]: I1202 16:57:01.796084 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://5bac94c471a07d1efa3cacb30b33cf6d0f493cee57eaf241d36618d258608c30" gracePeriod=600 Dec 02 16:57:02 crc kubenswrapper[4747]: I1202 16:57:02.643533 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="5bac94c471a07d1efa3cacb30b33cf6d0f493cee57eaf241d36618d258608c30" exitCode=0 Dec 02 16:57:02 crc kubenswrapper[4747]: I1202 16:57:02.643631 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"5bac94c471a07d1efa3cacb30b33cf6d0f493cee57eaf241d36618d258608c30"} Dec 02 16:57:02 crc kubenswrapper[4747]: I1202 16:57:02.644090 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"dd912523cc5101dd05f8356cae810078de73ddb22c7e5af9901013b7a8b2bc0c"} Dec 02 16:57:02 crc kubenswrapper[4747]: I1202 16:57:02.644128 4747 scope.go:117] "RemoveContainer" containerID="69159044e2a0a0955de6d7cd4a22699a8999351fe5ec6a3aa52db65ea45c9085" Dec 02 16:57:06 crc kubenswrapper[4747]: I1202 16:57:06.684304 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" event={"ID":"5e16dbd4-9db6-47d1-bfc3-549dbb8067e9","Type":"ContainerStarted","Data":"ca134ac0db1f1116399a577384f0951c35c443b8b909ee0bc9e399255ff4dc76"} Dec 02 16:57:06 crc kubenswrapper[4747]: I1202 16:57:06.685287 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:57:06 crc kubenswrapper[4747]: I1202 16:57:06.686738 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" event={"ID":"c15d13db-952d-4732-9d31-9ba5e926796c","Type":"ContainerStarted","Data":"775c2775fde2590b0cbb42368006b2a609bf7b801e97663f0a3828176dfa83e2"} Dec 02 16:57:06 crc kubenswrapper[4747]: I1202 16:57:06.686894 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:57:06 crc kubenswrapper[4747]: I1202 
16:57:06.706893 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" podStartSLOduration=1.95808955 podStartE2EDuration="7.706874714s" podCreationTimestamp="2025-12-02 16:56:59 +0000 UTC" firstStartedPulling="2025-12-02 16:57:00.217597177 +0000 UTC m=+850.744485926" lastFinishedPulling="2025-12-02 16:57:05.966382341 +0000 UTC m=+856.493271090" observedRunningTime="2025-12-02 16:57:06.70320138 +0000 UTC m=+857.230090129" watchObservedRunningTime="2025-12-02 16:57:06.706874714 +0000 UTC m=+857.233763463" Dec 02 16:57:06 crc kubenswrapper[4747]: I1202 16:57:06.728211 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" podStartSLOduration=2.028659192 podStartE2EDuration="7.728185292s" podCreationTimestamp="2025-12-02 16:56:59 +0000 UTC" firstStartedPulling="2025-12-02 16:57:00.276617739 +0000 UTC m=+850.803506488" lastFinishedPulling="2025-12-02 16:57:05.976143839 +0000 UTC m=+856.503032588" observedRunningTime="2025-12-02 16:57:06.725793913 +0000 UTC m=+857.252682672" watchObservedRunningTime="2025-12-02 16:57:06.728185292 +0000 UTC m=+857.255074041" Dec 02 16:57:19 crc kubenswrapper[4747]: I1202 16:57:19.986815 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5448bdf48d-p2qrm" Dec 02 16:57:39 crc kubenswrapper[4747]: I1202 16:57:39.577790 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-98db7dd6f-hftd4" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.377321 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b"] Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.378521 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.380748 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-zhb9s" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.380936 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.382278 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-p4tng"] Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.387931 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.392126 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.392191 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.398730 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b"] Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxm9t\" (UniqueName: \"kubernetes.io/projected/35b75b4b-4452-4e1b-8571-fbafb78b130b-kube-api-access-jxm9t\") pod \"frr-k8s-webhook-server-7fcb986d4-llx6b\" (UID: \"35b75b4b-4452-4e1b-8571-fbafb78b130b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496748 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-metrics\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496789 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1b3c8a9-1af8-495f-9e0b-397c645925bd-metrics-certs\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496812 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-reloader\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-sockets\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496853 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-conf\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496892 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz2lr\" (UniqueName: \"kubernetes.io/projected/b1b3c8a9-1af8-495f-9e0b-397c645925bd-kube-api-access-dz2lr\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496941 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/35b75b4b-4452-4e1b-8571-fbafb78b130b-cert\") pod 
\"frr-k8s-webhook-server-7fcb986d4-llx6b\" (UID: \"35b75b4b-4452-4e1b-8571-fbafb78b130b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.496963 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-startup\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.499190 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-7vlkv"] Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.500677 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.506094 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.506114 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.506272 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-8c6qb" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.507041 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.539983 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-hkpjf"] Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.541351 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.543742 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.590170 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-hkpjf"] Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598278 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-metrics\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1b3c8a9-1af8-495f-9e0b-397c645925bd-metrics-certs\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598360 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-sockets\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598378 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-reloader\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598398 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-conf\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598443 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz2lr\" (UniqueName: \"kubernetes.io/projected/b1b3c8a9-1af8-495f-9e0b-397c645925bd-kube-api-access-dz2lr\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598471 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/35b75b4b-4452-4e1b-8571-fbafb78b130b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-llx6b\" (UID: \"35b75b4b-4452-4e1b-8571-fbafb78b130b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598496 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-startup\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.598551 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxm9t\" (UniqueName: 
\"kubernetes.io/projected/35b75b4b-4452-4e1b-8571-fbafb78b130b-kube-api-access-jxm9t\") pod \"frr-k8s-webhook-server-7fcb986d4-llx6b\" (UID: \"35b75b4b-4452-4e1b-8571-fbafb78b130b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.599380 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-metrics\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.607413 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-conf\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.607995 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-sockets\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.608254 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b1b3c8a9-1af8-495f-9e0b-397c645925bd-reloader\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.608982 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b1b3c8a9-1af8-495f-9e0b-397c645925bd-frr-startup\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.609618 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1b3c8a9-1af8-495f-9e0b-397c645925bd-metrics-certs\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.612657 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/35b75b4b-4452-4e1b-8571-fbafb78b130b-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-llx6b\" (UID: \"35b75b4b-4452-4e1b-8571-fbafb78b130b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.620884 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxm9t\" (UniqueName: \"kubernetes.io/projected/35b75b4b-4452-4e1b-8571-fbafb78b130b-kube-api-access-jxm9t\") pod \"frr-k8s-webhook-server-7fcb986d4-llx6b\" (UID: \"35b75b4b-4452-4e1b-8571-fbafb78b130b\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.636774 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz2lr\" (UniqueName: \"kubernetes.io/projected/b1b3c8a9-1af8-495f-9e0b-397c645925bd-kube-api-access-dz2lr\") pod \"frr-k8s-p4tng\" (UID: \"b1b3c8a9-1af8-495f-9e0b-397c645925bd\") " pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 
16:57:40.700217 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-cert\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.700636 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdz4z\" (UniqueName: \"kubernetes.io/projected/b7db8655-8adb-4345-b69a-574f2fbbffcb-kube-api-access-rdz4z\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.700723 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.700834 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b7db8655-8adb-4345-b69a-574f2fbbffcb-metallb-excludel2\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.700994 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-metrics-certs\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.701112 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w297\" (UniqueName: \"kubernetes.io/projected/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-kube-api-access-2w297\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.701168 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-metrics-certs\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.706711 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.714765 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.804042 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdz4z\" (UniqueName: \"kubernetes.io/projected/b7db8655-8adb-4345-b69a-574f2fbbffcb-kube-api-access-rdz4z\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.804117 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.804166 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b7db8655-8adb-4345-b69a-574f2fbbffcb-metallb-excludel2\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.804199 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-metrics-certs\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.804264 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w297\" (UniqueName: \"kubernetes.io/projected/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-kube-api-access-2w297\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.804293 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-metrics-certs\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.804374 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-cert\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: E1202 16:57:40.805076 4747 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 16:57:40 crc kubenswrapper[4747]: E1202 16:57:40.805155 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist podName:b7db8655-8adb-4345-b69a-574f2fbbffcb nodeName:}" failed. No retries permitted until 2025-12-02 16:57:41.305135211 +0000 UTC m=+891.832023960 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist") pod "speaker-7vlkv" (UID: "b7db8655-8adb-4345-b69a-574f2fbbffcb") : secret "metallb-memberlist" not found Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.805820 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b7db8655-8adb-4345-b69a-574f2fbbffcb-metallb-excludel2\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.810201 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-metrics-certs\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.810548 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-cert\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.810931 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-metrics-certs\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.832726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w297\" (UniqueName: \"kubernetes.io/projected/3dd4af5f-feb2-49f2-ade6-35794a19f8b8-kube-api-access-2w297\") pod \"controller-f8648f98b-hkpjf\" (UID: \"3dd4af5f-feb2-49f2-ade6-35794a19f8b8\") " pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.848848 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdz4z\" (UniqueName: \"kubernetes.io/projected/b7db8655-8adb-4345-b69a-574f2fbbffcb-kube-api-access-rdz4z\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:40 crc kubenswrapper[4747]: I1202 16:57:40.861373 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.067144 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b"] Dec 02 16:57:41 crc kubenswrapper[4747]: W1202 16:57:41.073563 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35b75b4b_4452_4e1b_8571_fbafb78b130b.slice/crio-0c1ed57778ee8842b80f5f4b9b237a5acf891a98fb3500f6f74a863bee3bdeaf WatchSource:0}: Error finding container 0c1ed57778ee8842b80f5f4b9b237a5acf891a98fb3500f6f74a863bee3bdeaf: Status 404 returned error can't find the container with id 0c1ed57778ee8842b80f5f4b9b237a5acf891a98fb3500f6f74a863bee3bdeaf Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.226781 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-hkpjf"] Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.315285 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:41 crc kubenswrapper[4747]: E1202 16:57:41.315481 4747 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 16:57:41 crc kubenswrapper[4747]: E1202 16:57:41.315774 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist podName:b7db8655-8adb-4345-b69a-574f2fbbffcb nodeName:}" failed. No retries permitted until 2025-12-02 16:57:42.315740582 +0000 UTC m=+892.842629331 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist") pod "speaker-7vlkv" (UID: "b7db8655-8adb-4345-b69a-574f2fbbffcb") : secret "metallb-memberlist" not found Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.930656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerStarted","Data":"8e191df78cc191b891932f01e7384cb75dc5c6198cb4631f75cdcb77771d66c0"} Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.932124 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" event={"ID":"35b75b4b-4452-4e1b-8571-fbafb78b130b","Type":"ContainerStarted","Data":"0c1ed57778ee8842b80f5f4b9b237a5acf891a98fb3500f6f74a863bee3bdeaf"} Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.934150 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-hkpjf" event={"ID":"3dd4af5f-feb2-49f2-ade6-35794a19f8b8","Type":"ContainerStarted","Data":"97d2c5fb614713c9940f78ebdc52692dd7a94d74578f1c409ac57adb15f2bd2a"} Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.934191 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-hkpjf" event={"ID":"3dd4af5f-feb2-49f2-ade6-35794a19f8b8","Type":"ContainerStarted","Data":"ba8e44935aba062f356d1258cd7f985bd3b25db01608be70b2d0f50092519561"} Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.934210 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-hkpjf" event={"ID":"3dd4af5f-feb2-49f2-ade6-35794a19f8b8","Type":"ContainerStarted","Data":"7dde04d79fe5421215dc89ca42c5d17b2235432efdc11ee790e3996774336e5e"} Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.934336 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:57:41 crc kubenswrapper[4747]: I1202 16:57:41.959193 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-hkpjf" podStartSLOduration=1.95917043 podStartE2EDuration="1.95917043s" podCreationTimestamp="2025-12-02 16:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:57:41.95778876 +0000 UTC m=+892.484677529" watchObservedRunningTime="2025-12-02 16:57:41.95917043 +0000 UTC m=+892.486059189" Dec 02 16:57:42 crc kubenswrapper[4747]: I1202 16:57:42.333311 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:42 crc kubenswrapper[4747]: I1202 16:57:42.341660 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b7db8655-8adb-4345-b69a-574f2fbbffcb-memberlist\") pod \"speaker-7vlkv\" (UID: \"b7db8655-8adb-4345-b69a-574f2fbbffcb\") " pod="metallb-system/speaker-7vlkv" Dec 02 16:57:42 crc kubenswrapper[4747]: I1202 16:57:42.619140 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-7vlkv" Dec 02 16:57:42 crc kubenswrapper[4747]: W1202 16:57:42.695628 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7db8655_8adb_4345_b69a_574f2fbbffcb.slice/crio-bcccb254be45dcfbb2622ab560aaf003907830d95f308406db7086986d57be03 WatchSource:0}: Error finding container bcccb254be45dcfbb2622ab560aaf003907830d95f308406db7086986d57be03: Status 404 returned error can't find the container with id bcccb254be45dcfbb2622ab560aaf003907830d95f308406db7086986d57be03 Dec 02 16:57:42 crc kubenswrapper[4747]: I1202 16:57:42.945096 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7vlkv" event={"ID":"b7db8655-8adb-4345-b69a-574f2fbbffcb","Type":"ContainerStarted","Data":"bcccb254be45dcfbb2622ab560aaf003907830d95f308406db7086986d57be03"} Dec 02 16:57:43 crc kubenswrapper[4747]: I1202 16:57:43.970529 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7vlkv" event={"ID":"b7db8655-8adb-4345-b69a-574f2fbbffcb","Type":"ContainerStarted","Data":"ec9ca6e5a4b72ced7b1d0c2dbba857f820b6288065aaa90e99e2fc6434cb9a24"} Dec 02 16:57:43 crc kubenswrapper[4747]: I1202 16:57:43.976129 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-7vlkv" Dec 02 16:57:43 crc kubenswrapper[4747]: I1202 16:57:43.976183 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7vlkv" event={"ID":"b7db8655-8adb-4345-b69a-574f2fbbffcb","Type":"ContainerStarted","Data":"5ebc55e5c03cc2e334f4f7e792c73aa395d66c5e1c2f3b0210717fa447db19bd"} Dec 02 16:57:43 crc kubenswrapper[4747]: I1202 16:57:43.997336 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-7vlkv" podStartSLOduration=3.997311425 podStartE2EDuration="3.997311425s" podCreationTimestamp="2025-12-02 16:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:57:43.99361464 +0000 UTC m=+894.520503399" watchObservedRunningTime="2025-12-02 16:57:43.997311425 +0000 UTC m=+894.524200174" Dec 02 16:57:53 crc kubenswrapper[4747]: I1202 16:57:53.207571 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1b3c8a9-1af8-495f-9e0b-397c645925bd" containerID="b1b87585e1f80542ec8788f1f8a6b1853c7f99fee8bcce5ee373e8a14553525d" exitCode=0 Dec 02 16:57:53 crc kubenswrapper[4747]: I1202 16:57:53.207652 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerDied","Data":"b1b87585e1f80542ec8788f1f8a6b1853c7f99fee8bcce5ee373e8a14553525d"} Dec 02 16:57:53 crc kubenswrapper[4747]: I1202 16:57:53.210763 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" event={"ID":"35b75b4b-4452-4e1b-8571-fbafb78b130b","Type":"ContainerStarted","Data":"8d0e5f36d37fc32b9d6347bb636bbae28dbb219d7ee35961bb166f71dc478025"} Dec 02 16:57:53 crc kubenswrapper[4747]: I1202 16:57:53.210939 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:57:53 crc kubenswrapper[4747]: I1202 16:57:53.262931 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" podStartSLOduration=1.787216299 
podStartE2EDuration="13.262699489s" podCreationTimestamp="2025-12-02 16:57:40 +0000 UTC" firstStartedPulling="2025-12-02 16:57:41.076170745 +0000 UTC m=+891.603059494" lastFinishedPulling="2025-12-02 16:57:52.551653935 +0000 UTC m=+903.078542684" observedRunningTime="2025-12-02 16:57:53.259438836 +0000 UTC m=+903.786327595" watchObservedRunningTime="2025-12-02 16:57:53.262699489 +0000 UTC m=+903.789588248" Dec 02 16:57:54 crc kubenswrapper[4747]: I1202 16:57:54.220714 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1b3c8a9-1af8-495f-9e0b-397c645925bd" containerID="c18d83fc76858fbf9cab1b7cf42df26e1cc71c99026e233cbcf6bbe6e0cd4955" exitCode=0 Dec 02 16:57:54 crc kubenswrapper[4747]: I1202 16:57:54.220868 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerDied","Data":"c18d83fc76858fbf9cab1b7cf42df26e1cc71c99026e233cbcf6bbe6e0cd4955"} Dec 02 16:57:55 crc kubenswrapper[4747]: I1202 16:57:55.231368 4747 generic.go:334] "Generic (PLEG): container finished" podID="b1b3c8a9-1af8-495f-9e0b-397c645925bd" containerID="0fa5335b8a44ad60577dce5e421d305649c7d3a7b11a85aa85d8077fa3b15aef" exitCode=0 Dec 02 16:57:55 crc kubenswrapper[4747]: I1202 16:57:55.231454 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerDied","Data":"0fa5335b8a44ad60577dce5e421d305649c7d3a7b11a85aa85d8077fa3b15aef"} Dec 02 16:57:56 crc kubenswrapper[4747]: I1202 16:57:56.243929 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerStarted","Data":"68f6211241886ae087f97484fc9fd8d8a494afc4c35d8226ae31da0dcce003de"} Dec 02 16:57:56 crc kubenswrapper[4747]: I1202 16:57:56.244346 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerStarted","Data":"b42f89e27729d0b4795e499f1172ea31d736674bd6604a705401b207b4eb08b1"} Dec 02 16:57:56 crc kubenswrapper[4747]: I1202 16:57:56.244366 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerStarted","Data":"63468c9279591bafa02b70ea125eb0d3de38775572729a445142a6b1f4d5db32"} Dec 02 16:57:56 crc kubenswrapper[4747]: I1202 16:57:56.244376 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerStarted","Data":"e7c5c476992359a5ec17b5dea6e2865aa38b36342fd51cec9c92f12c9132f9fd"} Dec 02 16:57:57 crc kubenswrapper[4747]: I1202 16:57:57.257156 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerStarted","Data":"6841f3aa86ed521cbd1a029071cb65ef3224ea026a3fedeec372165940577a6e"} Dec 02 16:57:58 crc kubenswrapper[4747]: I1202 16:57:58.274017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p4tng" event={"ID":"b1b3c8a9-1af8-495f-9e0b-397c645925bd","Type":"ContainerStarted","Data":"3d9a8b00f1091b5c5d4b26b1afb7d6c7fbaed39574cf5faf4a385ff593bee582"} Dec 02 16:57:58 crc kubenswrapper[4747]: I1202 16:57:58.275107 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-p4tng" Dec 02 16:57:58 crc 
kubenswrapper[4747]: I1202 16:57:58.307049 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-p4tng" podStartSLOduration=6.667103061 podStartE2EDuration="18.307028227s" podCreationTimestamp="2025-12-02 16:57:40 +0000 UTC" firstStartedPulling="2025-12-02 16:57:40.930154534 +0000 UTC m=+891.457043283" lastFinishedPulling="2025-12-02 16:57:52.5700797 +0000 UTC m=+903.096968449" observedRunningTime="2025-12-02 16:57:58.300141021 +0000 UTC m=+908.827029770" watchObservedRunningTime="2025-12-02 16:57:58.307028227 +0000 UTC m=+908.833916976" Dec 02 16:58:00 crc kubenswrapper[4747]: I1202 16:58:00.715853 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-p4tng" Dec 02 16:58:00 crc kubenswrapper[4747]: I1202 16:58:00.762085 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-p4tng" Dec 02 16:58:00 crc kubenswrapper[4747]: I1202 16:58:00.868223 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-hkpjf" Dec 02 16:58:02 crc kubenswrapper[4747]: I1202 16:58:02.625747 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-7vlkv" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.671764 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-5vkf2"] Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.673183 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5vkf2" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.675933 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.679865 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.685893 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-v9cb2" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.693458 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5vkf2"] Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.778555 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz442\" (UniqueName: \"kubernetes.io/projected/356e180a-bc88-4f54-807e-0e9ba461a5fc-kube-api-access-sz442\") pod \"openstack-operator-index-5vkf2\" (UID: \"356e180a-bc88-4f54-807e-0e9ba461a5fc\") " pod="openstack-operators/openstack-operator-index-5vkf2" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.879956 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz442\" (UniqueName: \"kubernetes.io/projected/356e180a-bc88-4f54-807e-0e9ba461a5fc-kube-api-access-sz442\") pod \"openstack-operator-index-5vkf2\" (UID: \"356e180a-bc88-4f54-807e-0e9ba461a5fc\") " pod="openstack-operators/openstack-operator-index-5vkf2" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.915006 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz442\" (UniqueName: \"kubernetes.io/projected/356e180a-bc88-4f54-807e-0e9ba461a5fc-kube-api-access-sz442\") pod \"openstack-operator-index-5vkf2\" (UID: 
\"356e180a-bc88-4f54-807e-0e9ba461a5fc\") " pod="openstack-operators/openstack-operator-index-5vkf2" Dec 02 16:58:05 crc kubenswrapper[4747]: I1202 16:58:05.992889 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5vkf2" Dec 02 16:58:06 crc kubenswrapper[4747]: I1202 16:58:06.432494 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5vkf2"] Dec 02 16:58:06 crc kubenswrapper[4747]: W1202 16:58:06.443971 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod356e180a_bc88_4f54_807e_0e9ba461a5fc.slice/crio-c05e8859516c97c6e25ced2001eaee854e6249365b0c27a1846ecc1a4af12854 WatchSource:0}: Error finding container c05e8859516c97c6e25ced2001eaee854e6249365b0c27a1846ecc1a4af12854: Status 404 returned error can't find the container with id c05e8859516c97c6e25ced2001eaee854e6249365b0c27a1846ecc1a4af12854 Dec 02 16:58:07 crc kubenswrapper[4747]: I1202 16:58:07.342832 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5vkf2" event={"ID":"356e180a-bc88-4f54-807e-0e9ba461a5fc","Type":"ContainerStarted","Data":"c05e8859516c97c6e25ced2001eaee854e6249365b0c27a1846ecc1a4af12854"} Dec 02 16:58:09 crc kubenswrapper[4747]: I1202 16:58:09.032976 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5vkf2"] Dec 02 16:58:09 crc kubenswrapper[4747]: I1202 16:58:09.649820 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-br7nx"] Dec 02 16:58:09 crc kubenswrapper[4747]: I1202 16:58:09.651633 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:09 crc kubenswrapper[4747]: I1202 16:58:09.660295 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-br7nx"] Dec 02 16:58:09 crc kubenswrapper[4747]: I1202 16:58:09.819808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx9pj\" (UniqueName: \"kubernetes.io/projected/17f243ad-ab56-4cd4-9c45-1c665320e93d-kube-api-access-xx9pj\") pod \"openstack-operator-index-br7nx\" (UID: \"17f243ad-ab56-4cd4-9c45-1c665320e93d\") " pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:09 crc kubenswrapper[4747]: I1202 16:58:09.923014 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx9pj\" (UniqueName: \"kubernetes.io/projected/17f243ad-ab56-4cd4-9c45-1c665320e93d-kube-api-access-xx9pj\") pod \"openstack-operator-index-br7nx\" (UID: \"17f243ad-ab56-4cd4-9c45-1c665320e93d\") " pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:09 crc kubenswrapper[4747]: I1202 16:58:09.952987 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx9pj\" (UniqueName: \"kubernetes.io/projected/17f243ad-ab56-4cd4-9c45-1c665320e93d-kube-api-access-xx9pj\") pod \"openstack-operator-index-br7nx\" (UID: \"17f243ad-ab56-4cd4-9c45-1c665320e93d\") " pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.001453 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.392760 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5vkf2" event={"ID":"356e180a-bc88-4f54-807e-0e9ba461a5fc","Type":"ContainerStarted","Data":"3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10"} Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.392939 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-5vkf2" podUID="356e180a-bc88-4f54-807e-0e9ba461a5fc" containerName="registry-server" containerID="cri-o://3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10" gracePeriod=2 Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.423348 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-5vkf2" podStartSLOduration=1.840766816 podStartE2EDuration="5.423302688s" podCreationTimestamp="2025-12-02 16:58:05 +0000 UTC" firstStartedPulling="2025-12-02 16:58:06.448329689 +0000 UTC m=+916.975218458" lastFinishedPulling="2025-12-02 16:58:10.030865581 +0000 UTC m=+920.557754330" observedRunningTime="2025-12-02 16:58:10.409706471 +0000 UTC m=+920.936595260" watchObservedRunningTime="2025-12-02 16:58:10.423302688 +0000 UTC m=+920.950191477" Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.504919 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-br7nx"] Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.718858 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-llx6b" Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.720102 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-p4tng" Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.759748 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5vkf2" Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.838510 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz442\" (UniqueName: \"kubernetes.io/projected/356e180a-bc88-4f54-807e-0e9ba461a5fc-kube-api-access-sz442\") pod \"356e180a-bc88-4f54-807e-0e9ba461a5fc\" (UID: \"356e180a-bc88-4f54-807e-0e9ba461a5fc\") " Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.845819 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/356e180a-bc88-4f54-807e-0e9ba461a5fc-kube-api-access-sz442" (OuterVolumeSpecName: "kube-api-access-sz442") pod "356e180a-bc88-4f54-807e-0e9ba461a5fc" (UID: "356e180a-bc88-4f54-807e-0e9ba461a5fc"). InnerVolumeSpecName "kube-api-access-sz442". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:58:10 crc kubenswrapper[4747]: I1202 16:58:10.940854 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz442\" (UniqueName: \"kubernetes.io/projected/356e180a-bc88-4f54-807e-0e9ba461a5fc-kube-api-access-sz442\") on node \"crc\" DevicePath \"\"" Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.405207 4747 generic.go:334] "Generic (PLEG): container finished" podID="356e180a-bc88-4f54-807e-0e9ba461a5fc" containerID="3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10" exitCode=0 Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.405274 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5vkf2" Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.405275 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5vkf2" event={"ID":"356e180a-bc88-4f54-807e-0e9ba461a5fc","Type":"ContainerDied","Data":"3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10"} Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.405343 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5vkf2" event={"ID":"356e180a-bc88-4f54-807e-0e9ba461a5fc","Type":"ContainerDied","Data":"c05e8859516c97c6e25ced2001eaee854e6249365b0c27a1846ecc1a4af12854"} Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.405366 4747 scope.go:117] "RemoveContainer" containerID="3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10" Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.408113 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-br7nx" event={"ID":"17f243ad-ab56-4cd4-9c45-1c665320e93d","Type":"ContainerStarted","Data":"c77a892c12b4a6cd26db626b1f2649decc7e08d5699a46de6e18890675c37fd7"} Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.408182 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-br7nx" event={"ID":"17f243ad-ab56-4cd4-9c45-1c665320e93d","Type":"ContainerStarted","Data":"17a514161702b446eeaa5fd053abdddfdadb2e3f77b905fdeade99964f510d37"} Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.439756 4747 scope.go:117] "RemoveContainer" containerID="3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10" Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.442090 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-br7nx" podStartSLOduration=2.380765864 podStartE2EDuration="2.442076652s" podCreationTimestamp="2025-12-02 16:58:09 +0000 UTC" firstStartedPulling="2025-12-02 16:58:10.546575253 +0000 UTC m=+921.073464022" lastFinishedPulling="2025-12-02 16:58:10.607886061 +0000 UTC m=+921.134774810" observedRunningTime="2025-12-02 16:58:11.441442134 +0000 UTC m=+921.968330883" watchObservedRunningTime="2025-12-02 16:58:11.442076652 +0000 UTC m=+921.968965401" Dec 02 16:58:11 crc kubenswrapper[4747]: E1202 16:58:11.445099 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10\": container with ID starting with 3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10 not found: ID does not exist" containerID="3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10" 
Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.445169 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10"} err="failed to get container status \"3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10\": rpc error: code = NotFound desc = could not find container \"3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10\": container with ID starting with 3a66b6020844a49b7f7db8c3f090d4b50741b5633cbfac6a8a67f969ab9d2e10 not found: ID does not exist" Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.468673 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5vkf2"] Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.474454 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-5vkf2"] Dec 02 16:58:11 crc kubenswrapper[4747]: I1202 16:58:11.773128 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="356e180a-bc88-4f54-807e-0e9ba461a5fc" path="/var/lib/kubelet/pods/356e180a-bc88-4f54-807e-0e9ba461a5fc/volumes" Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.856692 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j6cw4"] Dec 02 16:58:18 crc kubenswrapper[4747]: E1202 16:58:18.858187 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="356e180a-bc88-4f54-807e-0e9ba461a5fc" containerName="registry-server" Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.858217 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="356e180a-bc88-4f54-807e-0e9ba461a5fc" containerName="registry-server" Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.858484 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="356e180a-bc88-4f54-807e-0e9ba461a5fc" containerName="registry-server" Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.860883 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.877891 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6cw4"] Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.974479 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-utilities\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.974530 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-catalog-content\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:18 crc kubenswrapper[4747]: I1202 16:58:18.974595 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrxkb\" (UniqueName: \"kubernetes.io/projected/184affdf-dfc3-49c6-b57d-65e529c4ff71-kube-api-access-wrxkb\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.076331 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-utilities\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.076387 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-catalog-content\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.076467 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrxkb\" (UniqueName: \"kubernetes.io/projected/184affdf-dfc3-49c6-b57d-65e529c4ff71-kube-api-access-wrxkb\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.077206 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-utilities\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.077286 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-catalog-content\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.111995 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wrxkb\" (UniqueName: \"kubernetes.io/projected/184affdf-dfc3-49c6-b57d-65e529c4ff71-kube-api-access-wrxkb\") pod \"redhat-marketplace-j6cw4\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.192468 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.440945 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6cw4"] Dec 02 16:58:19 crc kubenswrapper[4747]: I1202 16:58:19.493495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6cw4" event={"ID":"184affdf-dfc3-49c6-b57d-65e529c4ff71","Type":"ContainerStarted","Data":"2e8cd2d5c3c130d48562d93c118885f36ffe0f71f5b6015de446d06d14f1af0b"} Dec 02 16:58:20 crc kubenswrapper[4747]: I1202 16:58:20.001800 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:20 crc kubenswrapper[4747]: I1202 16:58:20.001888 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:20 crc kubenswrapper[4747]: I1202 16:58:20.043975 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:20 crc kubenswrapper[4747]: I1202 16:58:20.505754 4747 generic.go:334] "Generic (PLEG): container finished" podID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerID="cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03" exitCode=0 Dec 02 16:58:20 crc kubenswrapper[4747]: I1202 16:58:20.508156 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6cw4" event={"ID":"184affdf-dfc3-49c6-b57d-65e529c4ff71","Type":"ContainerDied","Data":"cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03"} Dec 02 16:58:20 crc kubenswrapper[4747]: I1202 16:58:20.561054 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-br7nx" Dec 02 16:58:21 crc kubenswrapper[4747]: I1202 16:58:21.518021 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6cw4" event={"ID":"184affdf-dfc3-49c6-b57d-65e529c4ff71","Type":"ContainerStarted","Data":"5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18"} Dec 02 16:58:22 crc kubenswrapper[4747]: I1202 16:58:22.530399 4747 generic.go:334] "Generic (PLEG): container finished" podID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerID="5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18" exitCode=0 Dec 02 16:58:22 crc kubenswrapper[4747]: I1202 16:58:22.530454 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6cw4" event={"ID":"184affdf-dfc3-49c6-b57d-65e529c4ff71","Type":"ContainerDied","Data":"5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18"} Dec 02 16:58:23 crc kubenswrapper[4747]: I1202 16:58:23.541413 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6cw4" event={"ID":"184affdf-dfc3-49c6-b57d-65e529c4ff71","Type":"ContainerStarted","Data":"d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703"} Dec 02 
16:58:23 crc kubenswrapper[4747]: I1202 16:58:23.562933 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j6cw4" podStartSLOduration=3.004442099 podStartE2EDuration="5.562888777s" podCreationTimestamp="2025-12-02 16:58:18 +0000 UTC" firstStartedPulling="2025-12-02 16:58:20.510801987 +0000 UTC m=+931.037690756" lastFinishedPulling="2025-12-02 16:58:23.069248675 +0000 UTC m=+933.596137434" observedRunningTime="2025-12-02 16:58:23.561347433 +0000 UTC m=+934.088236182" watchObservedRunningTime="2025-12-02 16:58:23.562888777 +0000 UTC m=+934.089777526" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.294608 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v"] Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.296796 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.301213 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dhx4c" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.317184 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v"] Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.411944 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-util\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.412140 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-bundle\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.412246 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5q4k\" (UniqueName: \"kubernetes.io/projected/3755dfee-851d-47aa-95a3-85dc9da31e53-kube-api-access-m5q4k\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.513587 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5q4k\" (UniqueName: \"kubernetes.io/projected/3755dfee-851d-47aa-95a3-85dc9da31e53-kube-api-access-m5q4k\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.513774 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-util\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.513963 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-bundle\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.514606 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-bundle\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.514882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-util\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.547011 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5q4k\" (UniqueName: \"kubernetes.io/projected/3755dfee-851d-47aa-95a3-85dc9da31e53-kube-api-access-m5q4k\") pod \"7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.658264 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:27 crc kubenswrapper[4747]: I1202 16:58:27.943266 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v"] Dec 02 16:58:28 crc kubenswrapper[4747]: I1202 16:58:28.580490 4747 generic.go:334] "Generic (PLEG): container finished" podID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerID="7794825bc7e8900ce7d03715015f754b1d643d5d482da37b768adaecd2dc5238" exitCode=0 Dec 02 16:58:28 crc kubenswrapper[4747]: I1202 16:58:28.580623 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" event={"ID":"3755dfee-851d-47aa-95a3-85dc9da31e53","Type":"ContainerDied","Data":"7794825bc7e8900ce7d03715015f754b1d643d5d482da37b768adaecd2dc5238"} Dec 02 16:58:28 crc kubenswrapper[4747]: I1202 16:58:28.580997 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" event={"ID":"3755dfee-851d-47aa-95a3-85dc9da31e53","Type":"ContainerStarted","Data":"3fba219670f11ca0de08e6e6070ec5c16b726701806bc5ff53a95a83cab4cbda"} Dec 02 16:58:29 crc kubenswrapper[4747]: I1202 16:58:29.194216 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:29 crc kubenswrapper[4747]: I1202 16:58:29.194292 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:29 crc kubenswrapper[4747]: I1202 16:58:29.240571 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:29 crc kubenswrapper[4747]: I1202 16:58:29.670762 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:30 crc kubenswrapper[4747]: I1202 16:58:30.600846 4747 generic.go:334] "Generic (PLEG): container finished" podID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerID="31b96e3c4f15577f244d70bc725da3a08e54018c494fc700e62c79b521edd196" exitCode=0 Dec 02 16:58:30 crc kubenswrapper[4747]: I1202 16:58:30.600981 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" event={"ID":"3755dfee-851d-47aa-95a3-85dc9da31e53","Type":"ContainerDied","Data":"31b96e3c4f15577f244d70bc725da3a08e54018c494fc700e62c79b521edd196"} Dec 02 16:58:31 crc kubenswrapper[4747]: I1202 16:58:31.610269 4747 generic.go:334] "Generic (PLEG): container finished" podID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerID="2f8a660f5fa849c44c35991a0e2ea373f2bd8f69cf07ec253deb8a9d59713258" exitCode=0 Dec 02 16:58:31 crc kubenswrapper[4747]: I1202 16:58:31.610327 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" event={"ID":"3755dfee-851d-47aa-95a3-85dc9da31e53","Type":"ContainerDied","Data":"2f8a660f5fa849c44c35991a0e2ea373f2bd8f69cf07ec253deb8a9d59713258"} Dec 02 16:58:32 crc kubenswrapper[4747]: I1202 16:58:32.432478 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6cw4"] Dec 02 16:58:32 crc kubenswrapper[4747]: I1202 16:58:32.433177 4747 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j6cw4" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="registry-server" containerID="cri-o://d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703" gracePeriod=2 Dec 02 16:58:32 crc kubenswrapper[4747]: I1202 16:58:32.904045 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.004447 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-bundle\") pod \"3755dfee-851d-47aa-95a3-85dc9da31e53\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.004515 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-util\") pod \"3755dfee-851d-47aa-95a3-85dc9da31e53\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.004616 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5q4k\" (UniqueName: \"kubernetes.io/projected/3755dfee-851d-47aa-95a3-85dc9da31e53-kube-api-access-m5q4k\") pod \"3755dfee-851d-47aa-95a3-85dc9da31e53\" (UID: \"3755dfee-851d-47aa-95a3-85dc9da31e53\") " Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.005445 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-bundle" (OuterVolumeSpecName: "bundle") pod "3755dfee-851d-47aa-95a3-85dc9da31e53" (UID: "3755dfee-851d-47aa-95a3-85dc9da31e53"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.014551 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3755dfee-851d-47aa-95a3-85dc9da31e53-kube-api-access-m5q4k" (OuterVolumeSpecName: "kube-api-access-m5q4k") pod "3755dfee-851d-47aa-95a3-85dc9da31e53" (UID: "3755dfee-851d-47aa-95a3-85dc9da31e53"). InnerVolumeSpecName "kube-api-access-m5q4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.022968 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-util" (OuterVolumeSpecName: "util") pod "3755dfee-851d-47aa-95a3-85dc9da31e53" (UID: "3755dfee-851d-47aa-95a3-85dc9da31e53"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.106930 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.106972 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3755dfee-851d-47aa-95a3-85dc9da31e53-util\") on node \"crc\" DevicePath \"\"" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.106983 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5q4k\" (UniqueName: \"kubernetes.io/projected/3755dfee-851d-47aa-95a3-85dc9da31e53-kube-api-access-m5q4k\") on node \"crc\" DevicePath \"\"" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.481634 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.616143 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-catalog-content\") pod \"184affdf-dfc3-49c6-b57d-65e529c4ff71\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.616315 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrxkb\" (UniqueName: \"kubernetes.io/projected/184affdf-dfc3-49c6-b57d-65e529c4ff71-kube-api-access-wrxkb\") pod \"184affdf-dfc3-49c6-b57d-65e529c4ff71\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.616404 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-utilities\") pod \"184affdf-dfc3-49c6-b57d-65e529c4ff71\" (UID: \"184affdf-dfc3-49c6-b57d-65e529c4ff71\") " Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.617964 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-utilities" (OuterVolumeSpecName: "utilities") pod "184affdf-dfc3-49c6-b57d-65e529c4ff71" (UID: "184affdf-dfc3-49c6-b57d-65e529c4ff71"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.621054 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/184affdf-dfc3-49c6-b57d-65e529c4ff71-kube-api-access-wrxkb" (OuterVolumeSpecName: "kube-api-access-wrxkb") pod "184affdf-dfc3-49c6-b57d-65e529c4ff71" (UID: "184affdf-dfc3-49c6-b57d-65e529c4ff71"). InnerVolumeSpecName "kube-api-access-wrxkb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.630566 4747 generic.go:334] "Generic (PLEG): container finished" podID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerID="d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703" exitCode=0 Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.630680 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6cw4" event={"ID":"184affdf-dfc3-49c6-b57d-65e529c4ff71","Type":"ContainerDied","Data":"d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703"} Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.630739 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j6cw4" event={"ID":"184affdf-dfc3-49c6-b57d-65e529c4ff71","Type":"ContainerDied","Data":"2e8cd2d5c3c130d48562d93c118885f36ffe0f71f5b6015de446d06d14f1af0b"} Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.630771 4747 scope.go:117] "RemoveContainer" containerID="d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.631025 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j6cw4" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.635157 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "184affdf-dfc3-49c6-b57d-65e529c4ff71" (UID: "184affdf-dfc3-49c6-b57d-65e529c4ff71"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.637359 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" event={"ID":"3755dfee-851d-47aa-95a3-85dc9da31e53","Type":"ContainerDied","Data":"3fba219670f11ca0de08e6e6070ec5c16b726701806bc5ff53a95a83cab4cbda"} Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.637422 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fba219670f11ca0de08e6e6070ec5c16b726701806bc5ff53a95a83cab4cbda" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.637510 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.662366 4747 scope.go:117] "RemoveContainer" containerID="5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.678680 4747 scope.go:117] "RemoveContainer" containerID="cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.703706 4747 scope.go:117] "RemoveContainer" containerID="d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703" Dec 02 16:58:33 crc kubenswrapper[4747]: E1202 16:58:33.704538 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703\": container with ID starting with d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703 not found: ID does not exist" containerID="d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.704688 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703"} err="failed to get container status \"d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703\": rpc error: code = NotFound desc = could not find container \"d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703\": container with ID starting with d81438052ecb20a6c90522eb893d135d45e66c91b857fc9ee20fd7bba8b57703 not found: ID does not exist" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.704793 4747 scope.go:117] "RemoveContainer" containerID="5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18" Dec 02 16:58:33 crc kubenswrapper[4747]: E1202 16:58:33.705479 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18\": container with ID starting with 5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18 not found: ID does not exist" containerID="5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.705556 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18"} err="failed to get container status \"5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18\": rpc error: code = NotFound desc = could not find container \"5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18\": container with ID starting with 5d1340dffee67dad2a7246f220fcaacc8641bdebb7248b608e7b1aa8a138fc18 not found: ID does not exist" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.705602 4747 scope.go:117] "RemoveContainer" containerID="cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03" Dec 02 16:58:33 crc kubenswrapper[4747]: E1202 16:58:33.706063 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03\": container with ID starting with cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03 not found: ID does not exist" 
containerID="cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.706181 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03"} err="failed to get container status \"cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03\": rpc error: code = NotFound desc = could not find container \"cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03\": container with ID starting with cb3f23fc7a5589722f91011dc28a9180387d515aff540065d8cde3efda5a7b03 not found: ID does not exist" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.718355 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.718489 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrxkb\" (UniqueName: \"kubernetes.io/projected/184affdf-dfc3-49c6-b57d-65e529c4ff71-kube-api-access-wrxkb\") on node \"crc\" DevicePath \"\"" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.718564 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/184affdf-dfc3-49c6-b57d-65e529c4ff71-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.976688 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6cw4"] Dec 02 16:58:33 crc kubenswrapper[4747]: I1202 16:58:33.982606 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j6cw4"] Dec 02 16:58:35 crc kubenswrapper[4747]: I1202 16:58:35.769832 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" path="/var/lib/kubelet/pods/184affdf-dfc3-49c6-b57d-65e529c4ff71/volumes" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.314829 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr"] Dec 02 16:58:39 crc kubenswrapper[4747]: E1202 16:58:39.315554 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerName="pull" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315572 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerName="pull" Dec 02 16:58:39 crc kubenswrapper[4747]: E1202 16:58:39.315593 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="registry-server" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315601 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="registry-server" Dec 02 16:58:39 crc kubenswrapper[4747]: E1202 16:58:39.315613 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerName="extract" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315622 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerName="extract" Dec 02 16:58:39 crc kubenswrapper[4747]: E1202 16:58:39.315634 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="extract-utilities" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315642 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="extract-utilities" Dec 02 16:58:39 crc kubenswrapper[4747]: E1202 16:58:39.315651 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="extract-content" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315678 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="extract-content" Dec 02 16:58:39 crc kubenswrapper[4747]: E1202 16:58:39.315699 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerName="util" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315708 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerName="util" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315879 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3755dfee-851d-47aa-95a3-85dc9da31e53" containerName="extract" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.315894 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="184affdf-dfc3-49c6-b57d-65e529c4ff71" containerName="registry-server" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.316652 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.332130 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-j665k" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.351991 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr"] Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.427884 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9h7c\" (UniqueName: \"kubernetes.io/projected/8983941a-65e0-4b84-9b02-38dc34133b0f-kube-api-access-s9h7c\") pod \"openstack-operator-controller-operator-5d4dbc7dd5-9bkrr\" (UID: \"8983941a-65e0-4b84-9b02-38dc34133b0f\") " pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.528977 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9h7c\" (UniqueName: \"kubernetes.io/projected/8983941a-65e0-4b84-9b02-38dc34133b0f-kube-api-access-s9h7c\") pod \"openstack-operator-controller-operator-5d4dbc7dd5-9bkrr\" (UID: \"8983941a-65e0-4b84-9b02-38dc34133b0f\") " pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.561997 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9h7c\" (UniqueName: \"kubernetes.io/projected/8983941a-65e0-4b84-9b02-38dc34133b0f-kube-api-access-s9h7c\") pod \"openstack-operator-controller-operator-5d4dbc7dd5-9bkrr\" (UID: \"8983941a-65e0-4b84-9b02-38dc34133b0f\") " pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" Dec 02 16:58:39 crc kubenswrapper[4747]: I1202 16:58:39.636947 4747 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" Dec 02 16:58:40 crc kubenswrapper[4747]: I1202 16:58:40.079452 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr"] Dec 02 16:58:40 crc kubenswrapper[4747]: I1202 16:58:40.688880 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" event={"ID":"8983941a-65e0-4b84-9b02-38dc34133b0f","Type":"ContainerStarted","Data":"46eafbe3263ed5ffd7e491e7221048e015a70609121d25edb5edb7cc728d8a97"} Dec 02 16:58:45 crc kubenswrapper[4747]: I1202 16:58:45.730806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" event={"ID":"8983941a-65e0-4b84-9b02-38dc34133b0f","Type":"ContainerStarted","Data":"5d57b5a8492c4f50f7a285651ed5e0e7c674ec170f5e0c7e59c525caf7f0ebda"} Dec 02 16:58:48 crc kubenswrapper[4747]: I1202 16:58:48.756148 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" event={"ID":"8983941a-65e0-4b84-9b02-38dc34133b0f","Type":"ContainerStarted","Data":"fba7a932904b5c3c0509001f42cab379a55c8356b4a0d36ca3989bfca9ebb53c"} Dec 02 16:58:48 crc kubenswrapper[4747]: I1202 16:58:48.756699 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" Dec 02 16:58:48 crc kubenswrapper[4747]: I1202 16:58:48.795643 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" podStartSLOduration=2.19610245 podStartE2EDuration="9.795623179s" podCreationTimestamp="2025-12-02 16:58:39 +0000 UTC" firstStartedPulling="2025-12-02 16:58:40.099830878 +0000 UTC m=+950.626719627" lastFinishedPulling="2025-12-02 16:58:47.699351607 +0000 UTC m=+958.226240356" observedRunningTime="2025-12-02 16:58:48.7903753 +0000 UTC m=+959.317264049" watchObservedRunningTime="2025-12-02 16:58:48.795623179 +0000 UTC m=+959.322511928" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.048619 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hbcpx"] Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.050766 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.071443 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hbcpx"] Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.131702 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-utilities\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.131757 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6hmb\" (UniqueName: \"kubernetes.io/projected/a67fc0f0-a479-44dc-9192-48bfaeb57abf-kube-api-access-l6hmb\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.131791 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-catalog-content\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.233256 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-catalog-content\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.233412 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-utilities\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.233442 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6hmb\" (UniqueName: \"kubernetes.io/projected/a67fc0f0-a479-44dc-9192-48bfaeb57abf-kube-api-access-l6hmb\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.234013 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-catalog-content\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.234317 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-utilities\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.255204 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l6hmb\" (UniqueName: \"kubernetes.io/projected/a67fc0f0-a479-44dc-9192-48bfaeb57abf-kube-api-access-l6hmb\") pod \"community-operators-hbcpx\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:52 crc kubenswrapper[4747]: I1202 16:58:52.373430 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:58:53 crc kubenswrapper[4747]: I1202 16:58:53.047836 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hbcpx"] Dec 02 16:58:53 crc kubenswrapper[4747]: I1202 16:58:53.821754 4747 generic.go:334] "Generic (PLEG): container finished" podID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerID="8b9738a69869716411ad8f08932c13c37d5b7e9605a08ce011b06defb097a291" exitCode=0 Dec 02 16:58:53 crc kubenswrapper[4747]: I1202 16:58:53.821825 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hbcpx" event={"ID":"a67fc0f0-a479-44dc-9192-48bfaeb57abf","Type":"ContainerDied","Data":"8b9738a69869716411ad8f08932c13c37d5b7e9605a08ce011b06defb097a291"} Dec 02 16:58:53 crc kubenswrapper[4747]: I1202 16:58:53.822444 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hbcpx" event={"ID":"a67fc0f0-a479-44dc-9192-48bfaeb57abf","Type":"ContainerStarted","Data":"7aecc715e4be3e5ffb21f9453baf6d0a522b4ce25df81eabac208bf28124177f"} Dec 02 16:58:54 crc kubenswrapper[4747]: I1202 16:58:54.840566 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ll4zf"] Dec 02 16:58:54 crc kubenswrapper[4747]: I1202 16:58:54.842593 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:54 crc kubenswrapper[4747]: I1202 16:58:54.862895 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ll4zf"] Dec 02 16:58:54 crc kubenswrapper[4747]: I1202 16:58:54.974387 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-utilities\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:54 crc kubenswrapper[4747]: I1202 16:58:54.974600 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6sn5\" (UniqueName: \"kubernetes.io/projected/7f80debf-ea47-4d1d-b52e-d8773e61d612-kube-api-access-f6sn5\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:54 crc kubenswrapper[4747]: I1202 16:58:54.974659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-catalog-content\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.075864 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-catalog-content\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.076032 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-utilities\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.076076 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6sn5\" (UniqueName: \"kubernetes.io/projected/7f80debf-ea47-4d1d-b52e-d8773e61d612-kube-api-access-f6sn5\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.076546 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-catalog-content\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.076647 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-utilities\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.101722 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f6sn5\" (UniqueName: \"kubernetes.io/projected/7f80debf-ea47-4d1d-b52e-d8773e61d612-kube-api-access-f6sn5\") pod \"certified-operators-ll4zf\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.216445 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.837956 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hbcpx" event={"ID":"a67fc0f0-a479-44dc-9192-48bfaeb57abf","Type":"ContainerStarted","Data":"6ac0152781385e0f29d242305f10074e575b8f776d7ea39115280aaccc0cf83a"} Dec 02 16:58:55 crc kubenswrapper[4747]: W1202 16:58:55.894162 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f80debf_ea47_4d1d_b52e_d8773e61d612.slice/crio-22135b536ef1b1761f4563d7ffa23758ac3bccf86d4db42ffb6ccb58d9cd26f3 WatchSource:0}: Error finding container 22135b536ef1b1761f4563d7ffa23758ac3bccf86d4db42ffb6ccb58d9cd26f3: Status 404 returned error can't find the container with id 22135b536ef1b1761f4563d7ffa23758ac3bccf86d4db42ffb6ccb58d9cd26f3 Dec 02 16:58:55 crc kubenswrapper[4747]: I1202 16:58:55.899446 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ll4zf"] Dec 02 16:58:56 crc kubenswrapper[4747]: I1202 16:58:56.847141 4747 generic.go:334] "Generic (PLEG): container finished" podID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerID="6ac0152781385e0f29d242305f10074e575b8f776d7ea39115280aaccc0cf83a" exitCode=0 Dec 02 16:58:56 crc kubenswrapper[4747]: I1202 16:58:56.847227 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hbcpx" event={"ID":"a67fc0f0-a479-44dc-9192-48bfaeb57abf","Type":"ContainerDied","Data":"6ac0152781385e0f29d242305f10074e575b8f776d7ea39115280aaccc0cf83a"} Dec 02 16:58:56 crc kubenswrapper[4747]: I1202 16:58:56.853746 4747 generic.go:334] "Generic (PLEG): container finished" podID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerID="23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6" exitCode=0 Dec 02 16:58:56 crc kubenswrapper[4747]: I1202 16:58:56.853873 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll4zf" event={"ID":"7f80debf-ea47-4d1d-b52e-d8773e61d612","Type":"ContainerDied","Data":"23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6"} Dec 02 16:58:56 crc kubenswrapper[4747]: I1202 16:58:56.854025 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll4zf" event={"ID":"7f80debf-ea47-4d1d-b52e-d8773e61d612","Type":"ContainerStarted","Data":"22135b536ef1b1761f4563d7ffa23758ac3bccf86d4db42ffb6ccb58d9cd26f3"} Dec 02 16:58:57 crc kubenswrapper[4747]: I1202 16:58:57.894504 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hbcpx" event={"ID":"a67fc0f0-a479-44dc-9192-48bfaeb57abf","Type":"ContainerStarted","Data":"be5afc481cb1ed2d5696293acc2aaffa1f77c1a79e72029c49bd436cfac4fd68"} Dec 02 16:58:57 crc kubenswrapper[4747]: I1202 16:58:57.917613 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hbcpx" 
podStartSLOduration=2.430813149 podStartE2EDuration="5.917583081s" podCreationTimestamp="2025-12-02 16:58:52 +0000 UTC" firstStartedPulling="2025-12-02 16:58:53.825663467 +0000 UTC m=+964.352552216" lastFinishedPulling="2025-12-02 16:58:57.312433399 +0000 UTC m=+967.839322148" observedRunningTime="2025-12-02 16:58:57.917346484 +0000 UTC m=+968.444235233" watchObservedRunningTime="2025-12-02 16:58:57.917583081 +0000 UTC m=+968.444471830" Dec 02 16:58:58 crc kubenswrapper[4747]: I1202 16:58:58.907126 4747 generic.go:334] "Generic (PLEG): container finished" podID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerID="13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16" exitCode=0 Dec 02 16:58:58 crc kubenswrapper[4747]: I1202 16:58:58.907284 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll4zf" event={"ID":"7f80debf-ea47-4d1d-b52e-d8773e61d612","Type":"ContainerDied","Data":"13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16"} Dec 02 16:58:59 crc kubenswrapper[4747]: I1202 16:58:59.641170 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5d4dbc7dd5-9bkrr" Dec 02 16:59:00 crc kubenswrapper[4747]: I1202 16:59:00.988237 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll4zf" event={"ID":"7f80debf-ea47-4d1d-b52e-d8773e61d612","Type":"ContainerStarted","Data":"5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d"} Dec 02 16:59:01 crc kubenswrapper[4747]: I1202 16:59:01.022394 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ll4zf" podStartSLOduration=3.9366899159999997 podStartE2EDuration="7.022374753s" podCreationTimestamp="2025-12-02 16:58:54 +0000 UTC" firstStartedPulling="2025-12-02 16:58:56.856002137 +0000 UTC m=+967.382890886" lastFinishedPulling="2025-12-02 16:58:59.941686974 +0000 UTC m=+970.468575723" observedRunningTime="2025-12-02 16:59:01.020088208 +0000 UTC m=+971.546976957" watchObservedRunningTime="2025-12-02 16:59:01.022374753 +0000 UTC m=+971.549263502" Dec 02 16:59:02 crc kubenswrapper[4747]: I1202 16:59:02.373860 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:59:02 crc kubenswrapper[4747]: I1202 16:59:02.373986 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:59:02 crc kubenswrapper[4747]: I1202 16:59:02.424776 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:59:03 crc kubenswrapper[4747]: I1202 16:59:03.054631 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:59:05 crc kubenswrapper[4747]: I1202 16:59:05.217333 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:59:05 crc kubenswrapper[4747]: I1202 16:59:05.217440 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:59:05 crc kubenswrapper[4747]: I1202 16:59:05.259139 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:59:06 crc 
kubenswrapper[4747]: I1202 16:59:06.036138 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hbcpx"] Dec 02 16:59:06 crc kubenswrapper[4747]: I1202 16:59:06.036543 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hbcpx" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="registry-server" containerID="cri-o://be5afc481cb1ed2d5696293acc2aaffa1f77c1a79e72029c49bd436cfac4fd68" gracePeriod=2 Dec 02 16:59:06 crc kubenswrapper[4747]: I1202 16:59:06.065456 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:59:06 crc kubenswrapper[4747]: I1202 16:59:06.833964 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ll4zf"] Dec 02 16:59:08 crc kubenswrapper[4747]: I1202 16:59:08.036841 4747 generic.go:334] "Generic (PLEG): container finished" podID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerID="be5afc481cb1ed2d5696293acc2aaffa1f77c1a79e72029c49bd436cfac4fd68" exitCode=0 Dec 02 16:59:08 crc kubenswrapper[4747]: I1202 16:59:08.036956 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hbcpx" event={"ID":"a67fc0f0-a479-44dc-9192-48bfaeb57abf","Type":"ContainerDied","Data":"be5afc481cb1ed2d5696293acc2aaffa1f77c1a79e72029c49bd436cfac4fd68"} Dec 02 16:59:08 crc kubenswrapper[4747]: I1202 16:59:08.037122 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ll4zf" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="registry-server" containerID="cri-o://5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d" gracePeriod=2 Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.689445 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.823165 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-catalog-content\") pod \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.823302 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-utilities\") pod \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.823337 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6hmb\" (UniqueName: \"kubernetes.io/projected/a67fc0f0-a479-44dc-9192-48bfaeb57abf-kube-api-access-l6hmb\") pod \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\" (UID: \"a67fc0f0-a479-44dc-9192-48bfaeb57abf\") " Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.824743 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-utilities" (OuterVolumeSpecName: "utilities") pod "a67fc0f0-a479-44dc-9192-48bfaeb57abf" (UID: "a67fc0f0-a479-44dc-9192-48bfaeb57abf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.829206 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a67fc0f0-a479-44dc-9192-48bfaeb57abf-kube-api-access-l6hmb" (OuterVolumeSpecName: "kube-api-access-l6hmb") pod "a67fc0f0-a479-44dc-9192-48bfaeb57abf" (UID: "a67fc0f0-a479-44dc-9192-48bfaeb57abf"). InnerVolumeSpecName "kube-api-access-l6hmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.874862 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a67fc0f0-a479-44dc-9192-48bfaeb57abf" (UID: "a67fc0f0-a479-44dc-9192-48bfaeb57abf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.925341 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.925387 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6hmb\" (UniqueName: \"kubernetes.io/projected/a67fc0f0-a479-44dc-9192-48bfaeb57abf-kube-api-access-l6hmb\") on node \"crc\" DevicePath \"\"" Dec 02 16:59:10 crc kubenswrapper[4747]: I1202 16:59:10.925399 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a67fc0f0-a479-44dc-9192-48bfaeb57abf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.056599 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.063543 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hbcpx" event={"ID":"a67fc0f0-a479-44dc-9192-48bfaeb57abf","Type":"ContainerDied","Data":"7aecc715e4be3e5ffb21f9453baf6d0a522b4ce25df81eabac208bf28124177f"} Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.063609 4747 scope.go:117] "RemoveContainer" containerID="be5afc481cb1ed2d5696293acc2aaffa1f77c1a79e72029c49bd436cfac4fd68" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.063607 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hbcpx" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.067482 4747 generic.go:334] "Generic (PLEG): container finished" podID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerID="5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d" exitCode=0 Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.067542 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll4zf" event={"ID":"7f80debf-ea47-4d1d-b52e-d8773e61d612","Type":"ContainerDied","Data":"5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d"} Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.067582 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ll4zf" event={"ID":"7f80debf-ea47-4d1d-b52e-d8773e61d612","Type":"ContainerDied","Data":"22135b536ef1b1761f4563d7ffa23758ac3bccf86d4db42ffb6ccb58d9cd26f3"} Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.067657 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ll4zf" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.089812 4747 scope.go:117] "RemoveContainer" containerID="6ac0152781385e0f29d242305f10074e575b8f776d7ea39115280aaccc0cf83a" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.108330 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hbcpx"] Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.116415 4747 scope.go:117] "RemoveContainer" containerID="8b9738a69869716411ad8f08932c13c37d5b7e9605a08ce011b06defb097a291" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.117308 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hbcpx"] Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.129286 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-catalog-content\") pod \"7f80debf-ea47-4d1d-b52e-d8773e61d612\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.129333 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-utilities\") pod \"7f80debf-ea47-4d1d-b52e-d8773e61d612\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.129361 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6sn5\" (UniqueName: \"kubernetes.io/projected/7f80debf-ea47-4d1d-b52e-d8773e61d612-kube-api-access-f6sn5\") pod \"7f80debf-ea47-4d1d-b52e-d8773e61d612\" (UID: \"7f80debf-ea47-4d1d-b52e-d8773e61d612\") " Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.130893 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-utilities" (OuterVolumeSpecName: "utilities") pod "7f80debf-ea47-4d1d-b52e-d8773e61d612" (UID: "7f80debf-ea47-4d1d-b52e-d8773e61d612"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.134620 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f80debf-ea47-4d1d-b52e-d8773e61d612-kube-api-access-f6sn5" (OuterVolumeSpecName: "kube-api-access-f6sn5") pod "7f80debf-ea47-4d1d-b52e-d8773e61d612" (UID: "7f80debf-ea47-4d1d-b52e-d8773e61d612"). InnerVolumeSpecName "kube-api-access-f6sn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.139430 4747 scope.go:117] "RemoveContainer" containerID="5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.154733 4747 scope.go:117] "RemoveContainer" containerID="13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.171060 4747 scope.go:117] "RemoveContainer" containerID="23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.185785 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f80debf-ea47-4d1d-b52e-d8773e61d612" (UID: "7f80debf-ea47-4d1d-b52e-d8773e61d612"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.189380 4747 scope.go:117] "RemoveContainer" containerID="5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d" Dec 02 16:59:11 crc kubenswrapper[4747]: E1202 16:59:11.189969 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d\": container with ID starting with 5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d not found: ID does not exist" containerID="5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.190029 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d"} err="failed to get container status \"5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d\": rpc error: code = NotFound desc = could not find container \"5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d\": container with ID starting with 5689510f729546182ee6eb04fd5a08ce9c24d8b32c67cf231cb2062ab69d355d not found: ID does not exist" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.190067 4747 scope.go:117] "RemoveContainer" containerID="13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16" Dec 02 16:59:11 crc kubenswrapper[4747]: E1202 16:59:11.190498 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16\": container with ID starting with 13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16 not found: ID does not exist" containerID="13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.190535 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16"} err="failed to get container status \"13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16\": rpc error: code = NotFound desc = could not find container \"13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16\": container with ID starting with 13690092cff593ee901c49909b1ac1ffb032f8af319be540a6eb5446c7461e16 not found: ID does not exist" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.190560 4747 scope.go:117] "RemoveContainer" containerID="23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6" Dec 02 16:59:11 crc kubenswrapper[4747]: E1202 16:59:11.190859 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6\": container with ID starting with 23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6 not found: ID does not exist" containerID="23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.190929 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6"} err="failed to get container status \"23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6\": rpc error: code = NotFound desc = could not find container \"23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6\": container with ID starting with 23feccb8c5b1e8ddfc09b7e447cb654e0f3e35f63c4ac9ccede3e1239af659b6 not found: ID does not exist" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.230212 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6sn5\" (UniqueName: \"kubernetes.io/projected/7f80debf-ea47-4d1d-b52e-d8773e61d612-kube-api-access-f6sn5\") on node \"crc\" DevicePath \"\"" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.230250 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.230261 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f80debf-ea47-4d1d-b52e-d8773e61d612-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.414022 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ll4zf"] Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.421271 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ll4zf"] Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.769946 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" path="/var/lib/kubelet/pods/7f80debf-ea47-4d1d-b52e-d8773e61d612/volumes" Dec 02 16:59:11 crc kubenswrapper[4747]: I1202 16:59:11.770567 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" path="/var/lib/kubelet/pods/a67fc0f0-a479-44dc-9192-48bfaeb57abf/volumes" Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.160736 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"] 
Dec 02 16:59:26 crc kubenswrapper[4747]: E1202 16:59:26.161806 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="extract-utilities"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.161825 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="extract-utilities"
Dec 02 16:59:26 crc kubenswrapper[4747]: E1202 16:59:26.161844 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="registry-server"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.161850 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="registry-server"
Dec 02 16:59:26 crc kubenswrapper[4747]: E1202 16:59:26.161867 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="extract-utilities"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.161876 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="extract-utilities"
Dec 02 16:59:26 crc kubenswrapper[4747]: E1202 16:59:26.161889 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="extract-content"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.161895 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="extract-content"
Dec 02 16:59:26 crc kubenswrapper[4747]: E1202 16:59:26.161925 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="registry-server"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.161931 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="registry-server"
Dec 02 16:59:26 crc kubenswrapper[4747]: E1202 16:59:26.161946 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="extract-content"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.161956 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="extract-content"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.162115 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f80debf-ea47-4d1d-b52e-d8773e61d612" containerName="registry-server"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.162138 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67fc0f0-a479-44dc-9192-48bfaeb57abf" containerName="registry-server"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.162961 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.167157 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-8k6s4"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.177654 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.190763 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.192027 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.195248 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-8hrqk"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.216281 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6w9p\" (UniqueName: \"kubernetes.io/projected/02a1a6c9-a064-447d-85b3-61d6de6bba1a-kube-api-access-f6w9p\") pod \"barbican-operator-controller-manager-5bfbbb859d-5b75z\" (UID: \"02a1a6c9-a064-447d-85b3-61d6de6bba1a\") " pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.216364 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-272sc\" (UniqueName: \"kubernetes.io/projected/84349f11-8427-4aa8-ae24-750a6fdc5e78-kube-api-access-272sc\") pod \"cinder-operator-controller-manager-748967c98-zjpdf\" (UID: \"84349f11-8427-4aa8-ae24-750a6fdc5e78\") " pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.217876 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.219183 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.224670 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.224696 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-p25fl"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.225760 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.230891 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-xv9jt"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.242629 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.306012 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.307590 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.311505 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.318162 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6w9p\" (UniqueName: \"kubernetes.io/projected/02a1a6c9-a064-447d-85b3-61d6de6bba1a-kube-api-access-f6w9p\") pod \"barbican-operator-controller-manager-5bfbbb859d-5b75z\" (UID: \"02a1a6c9-a064-447d-85b3-61d6de6bba1a\") " pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.318255 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-272sc\" (UniqueName: \"kubernetes.io/projected/84349f11-8427-4aa8-ae24-750a6fdc5e78-kube-api-access-272sc\") pod \"cinder-operator-controller-manager-748967c98-zjpdf\" (UID: \"84349f11-8427-4aa8-ae24-750a6fdc5e78\") " pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.318442 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-78q9c"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.319249 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.336528 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.360184 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6w9p\" (UniqueName: \"kubernetes.io/projected/02a1a6c9-a064-447d-85b3-61d6de6bba1a-kube-api-access-f6w9p\") pod \"barbican-operator-controller-manager-5bfbbb859d-5b75z\" (UID: \"02a1a6c9-a064-447d-85b3-61d6de6bba1a\") " pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.363459 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.365007 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.377714 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-2ml6h"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.378502 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.385732 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-272sc\" (UniqueName: \"kubernetes.io/projected/84349f11-8427-4aa8-ae24-750a6fdc5e78-kube-api-access-272sc\") pod \"cinder-operator-controller-manager-748967c98-zjpdf\" (UID: \"84349f11-8427-4aa8-ae24-750a6fdc5e78\") " pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.392320 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.393715 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.396030 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-w7nzn"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.399053 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.414366 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.424837 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9npg\" (UniqueName: \"kubernetes.io/projected/b1871043-c496-421e-8055-817652748d46-kube-api-access-t9npg\") pod \"designate-operator-controller-manager-6788cc6d75-nsvrj\" (UID: \"b1871043-c496-421e-8055-817652748d46\") " pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.425002 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twpgd\" (UniqueName: \"kubernetes.io/projected/aac0c240-30e1-410d-bab3-b87965dbd297-kube-api-access-twpgd\") pod \"heat-operator-controller-manager-698d6fd7d6-jhjgl\" (UID: \"aac0c240-30e1-410d-bab3-b87965dbd297\") " pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.425042 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nthcr\" (UniqueName: \"kubernetes.io/projected/1a8a4d9e-ee5e-4235-bae4-23eb196dac78-kube-api-access-nthcr\") pod \"glance-operator-controller-manager-85fbd69fcd-ns9p8\" (UID: \"1a8a4d9e-ee5e-4235-bae4-23eb196dac78\") " pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.435689 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.437170 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.451487 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-5pnt4"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.453557 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.464025 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.465489 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.475800 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-s4tjt"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.487125 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.490984 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.509814 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.529638 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ws9k\" (UniqueName: \"kubernetes.io/projected/dff222d7-fd30-4d3a-839a-6478da00ef65-kube-api-access-4ws9k\") pod \"horizon-operator-controller-manager-7d5d9fd47f-dhznh\" (UID: \"dff222d7-fd30-4d3a-839a-6478da00ef65\") " pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.529757 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twpgd\" (UniqueName: \"kubernetes.io/projected/aac0c240-30e1-410d-bab3-b87965dbd297-kube-api-access-twpgd\") pod \"heat-operator-controller-manager-698d6fd7d6-jhjgl\" (UID: \"aac0c240-30e1-410d-bab3-b87965dbd297\") " pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.529807 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb-cert\") pod \"infra-operator-controller-manager-6c55d8d69b-f7k5g\" (UID: \"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb\") " pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.529859 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nthcr\" (UniqueName: \"kubernetes.io/projected/1a8a4d9e-ee5e-4235-bae4-23eb196dac78-kube-api-access-nthcr\") pod \"glance-operator-controller-manager-85fbd69fcd-ns9p8\" (UID: \"1a8a4d9e-ee5e-4235-bae4-23eb196dac78\") " pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.529898 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgqmv\" (UniqueName: \"kubernetes.io/projected/eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb-kube-api-access-pgqmv\") pod \"infra-operator-controller-manager-6c55d8d69b-f7k5g\" (UID: \"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb\") " pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.529964 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9npg\" (UniqueName: \"kubernetes.io/projected/b1871043-c496-421e-8055-817652748d46-kube-api-access-t9npg\") pod \"designate-operator-controller-manager-6788cc6d75-nsvrj\" (UID: \"b1871043-c496-421e-8055-817652748d46\") " pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.614958 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twpgd\" (UniqueName: \"kubernetes.io/projected/aac0c240-30e1-410d-bab3-b87965dbd297-kube-api-access-twpgd\") pod \"heat-operator-controller-manager-698d6fd7d6-jhjgl\" (UID: \"aac0c240-30e1-410d-bab3-b87965dbd297\") " pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.624534 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9npg\" (UniqueName: \"kubernetes.io/projected/b1871043-c496-421e-8055-817652748d46-kube-api-access-t9npg\") pod \"designate-operator-controller-manager-6788cc6d75-nsvrj\" (UID: \"b1871043-c496-421e-8055-817652748d46\") " pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.624546 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nthcr\" (UniqueName: \"kubernetes.io/projected/1a8a4d9e-ee5e-4235-bae4-23eb196dac78-kube-api-access-nthcr\") pod \"glance-operator-controller-manager-85fbd69fcd-ns9p8\" (UID: \"1a8a4d9e-ee5e-4235-bae4-23eb196dac78\") " pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.641062 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgqmv\" (UniqueName: \"kubernetes.io/projected/eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb-kube-api-access-pgqmv\") pod \"infra-operator-controller-manager-6c55d8d69b-f7k5g\" (UID: \"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb\") " pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.641164 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvz4l\" (UniqueName: \"kubernetes.io/projected/3e91b7b4-9289-4769-83a7-4cd35038aaad-kube-api-access-rvz4l\") pod \"keystone-operator-controller-manager-79cc9d59f5-ftf2c\" (UID: \"3e91b7b4-9289-4769-83a7-4cd35038aaad\") " pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.641420 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ws9k\" (UniqueName: \"kubernetes.io/projected/dff222d7-fd30-4d3a-839a-6478da00ef65-kube-api-access-4ws9k\") pod \"horizon-operator-controller-manager-7d5d9fd47f-dhznh\" (UID: \"dff222d7-fd30-4d3a-839a-6478da00ef65\") " pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.643391 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.643822 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb-cert\") pod \"infra-operator-controller-manager-6c55d8d69b-f7k5g\" (UID: \"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb\") " pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.643880 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnz2g\" (UniqueName: \"kubernetes.io/projected/c637ccff-ec15-453e-9d0b-1e9d013f5f60-kube-api-access-dnz2g\") pod \"ironic-operator-controller-manager-54485f899-qjnsp\" (UID: \"c637ccff-ec15-453e-9d0b-1e9d013f5f60\") " pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.651138 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb-cert\") pod \"infra-operator-controller-manager-6c55d8d69b-f7k5g\" (UID: \"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb\") " pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.654185 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.656129 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.676548 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.677853 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.684838 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-v24p2"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.689659 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-lr8gx"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.707203 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.731188 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgqmv\" (UniqueName: \"kubernetes.io/projected/eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb-kube-api-access-pgqmv\") pod \"infra-operator-controller-manager-6c55d8d69b-f7k5g\" (UID: \"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb\") " pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.736121 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ws9k\" (UniqueName: \"kubernetes.io/projected/dff222d7-fd30-4d3a-839a-6478da00ef65-kube-api-access-4ws9k\") pod \"horizon-operator-controller-manager-7d5d9fd47f-dhznh\" (UID: \"dff222d7-fd30-4d3a-839a-6478da00ef65\") " pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.736270 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.748150 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vn8bd\" (UniqueName: \"kubernetes.io/projected/3685e169-7bca-47ae-b5bc-5945db4fa054-kube-api-access-vn8bd\") pod \"manila-operator-controller-manager-5cbc8c7f96-f2qv7\" (UID: \"3685e169-7bca-47ae-b5bc-5945db4fa054\") " pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.748239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnz2g\" (UniqueName: \"kubernetes.io/projected/c637ccff-ec15-453e-9d0b-1e9d013f5f60-kube-api-access-dnz2g\") pod \"ironic-operator-controller-manager-54485f899-qjnsp\" (UID: \"c637ccff-ec15-453e-9d0b-1e9d013f5f60\") " pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.748288 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvz4l\" (UniqueName: \"kubernetes.io/projected/3e91b7b4-9289-4769-83a7-4cd35038aaad-kube-api-access-rvz4l\") pod \"keystone-operator-controller-manager-79cc9d59f5-ftf2c\" (UID: \"3e91b7b4-9289-4769-83a7-4cd35038aaad\") " pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.761500 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.782923 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.785222 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnz2g\" (UniqueName: \"kubernetes.io/projected/c637ccff-ec15-453e-9d0b-1e9d013f5f60-kube-api-access-dnz2g\") pod \"ironic-operator-controller-manager-54485f899-qjnsp\" (UID: \"c637ccff-ec15-453e-9d0b-1e9d013f5f60\") " pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.801286 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.808580 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.814312 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-rkrrr"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.832348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvz4l\" (UniqueName: \"kubernetes.io/projected/3e91b7b4-9289-4769-83a7-4cd35038aaad-kube-api-access-rvz4l\") pod \"keystone-operator-controller-manager-79cc9d59f5-ftf2c\" (UID: \"3e91b7b4-9289-4769-83a7-4cd35038aaad\") " pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.847959 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.851641 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.854673 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-kkmcj"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.856021 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vn8bd\" (UniqueName: \"kubernetes.io/projected/3685e169-7bca-47ae-b5bc-5945db4fa054-kube-api-access-vn8bd\") pod \"manila-operator-controller-manager-5cbc8c7f96-f2qv7\" (UID: \"3685e169-7bca-47ae-b5bc-5945db4fa054\") " pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.856173 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvp7r\" (UniqueName: \"kubernetes.io/projected/3f797f20-b787-4429-a862-badf66ed38ea-kube-api-access-mvp7r\") pod \"mariadb-operator-controller-manager-64d7c556cd-vkmkn\" (UID: \"3f797f20-b787-4429-a862-badf66ed38ea\") " pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.872723 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.876684 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.883184 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.907584 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vn8bd\" (UniqueName: \"kubernetes.io/projected/3685e169-7bca-47ae-b5bc-5945db4fa054-kube-api-access-vn8bd\") pod \"manila-operator-controller-manager-5cbc8c7f96-f2qv7\" (UID: \"3685e169-7bca-47ae-b5bc-5945db4fa054\") " pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.926640 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.943830 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.960074 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.958457 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj76q\" (UniqueName: \"kubernetes.io/projected/11014c8f-0b84-470f-aaf8-0d029800d594-kube-api-access-vj76q\") pod \"nova-operator-controller-manager-79d658b66d-tkgmm\" (UID: \"11014c8f-0b84-470f-aaf8-0d029800d594\") " pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.961530 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvp7r\" (UniqueName: \"kubernetes.io/projected/3f797f20-b787-4429-a862-badf66ed38ea-kube-api-access-mvp7r\") pod \"mariadb-operator-controller-manager-64d7c556cd-vkmkn\" (UID: \"3f797f20-b787-4429-a862-badf66ed38ea\") " pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.961652 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7smt\" (UniqueName: \"kubernetes.io/projected/f5edfca9-f892-409d-856c-70e757072464-kube-api-access-z7smt\") pod \"neutron-operator-controller-manager-58879495c-wg54q\" (UID: \"f5edfca9-f892-409d-856c-70e757072464\") " pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.971063 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"]
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.972599 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.981120 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-nk5tv"
Dec 02 16:59:26 crc kubenswrapper[4747]: I1202 16:59:26.998728 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.001780 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvp7r\" (UniqueName: \"kubernetes.io/projected/3f797f20-b787-4429-a862-badf66ed38ea-kube-api-access-mvp7r\") pod \"mariadb-operator-controller-manager-64d7c556cd-vkmkn\" (UID: \"3f797f20-b787-4429-a862-badf66ed38ea\") " pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.004321 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.020420 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.022625 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.036214 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.039758 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.040310 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-hq9pw"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.043459 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.054260 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.055992 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.061861 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-wnprb"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.063240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7smt\" (UniqueName: \"kubernetes.io/projected/f5edfca9-f892-409d-856c-70e757072464-kube-api-access-z7smt\") pod \"neutron-operator-controller-manager-58879495c-wg54q\" (UID: \"f5edfca9-f892-409d-856c-70e757072464\") " pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.063283 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2tlg\" (UniqueName: \"kubernetes.io/projected/93ff580d-18b2-4e1f-af0b-f2bd36b1e0db-kube-api-access-p2tlg\") pod \"octavia-operator-controller-manager-d5fb87cb8-8z6z7\" (UID: \"93ff580d-18b2-4e1f-af0b-f2bd36b1e0db\") " pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.063335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj76q\" (UniqueName: \"kubernetes.io/projected/11014c8f-0b84-470f-aaf8-0d029800d594-kube-api-access-vj76q\") pod \"nova-operator-controller-manager-79d658b66d-tkgmm\" (UID: \"11014c8f-0b84-470f-aaf8-0d029800d594\") " pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.070146 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.072076 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.080212 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-qwnjb"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.101539 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.120849 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj76q\" (UniqueName: \"kubernetes.io/projected/11014c8f-0b84-470f-aaf8-0d029800d594-kube-api-access-vj76q\") pod \"nova-operator-controller-manager-79d658b66d-tkgmm\" (UID: \"11014c8f-0b84-470f-aaf8-0d029800d594\") " pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.155240 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.156439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7smt\" (UniqueName: \"kubernetes.io/projected/f5edfca9-f892-409d-856c-70e757072464-kube-api-access-z7smt\") pod \"neutron-operator-controller-manager-58879495c-wg54q\" (UID: \"f5edfca9-f892-409d-856c-70e757072464\") " pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.157070 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.165238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt2np\" (UniqueName: \"kubernetes.io/projected/50fe2539-78c5-4fde-9554-30143fdc520f-kube-api-access-zt2np\") pod \"openstack-baremetal-operator-controller-manager-77868f484-n5tnz\" (UID: \"50fe2539-78c5-4fde-9554-30143fdc520f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.165325 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmnsr\" (UniqueName: \"kubernetes.io/projected/d539442d-27c5-4383-9a11-589905951e21-kube-api-access-wmnsr\") pod \"ovn-operator-controller-manager-5b67cfc8fb-j9r8v\" (UID: \"d539442d-27c5-4383-9a11-589905951e21\") " pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.165383 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50fe2539-78c5-4fde-9554-30143fdc520f-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-n5tnz\" (UID: \"50fe2539-78c5-4fde-9554-30143fdc520f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.165485 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zj2cl\" (UniqueName: \"kubernetes.io/projected/c8dc257d-bb42-43c0-b784-483ccb97f95f-kube-api-access-zj2cl\") pod \"placement-operator-controller-manager-867d87977b-kpccv\" (UID: \"c8dc257d-bb42-43c0-b784-483ccb97f95f\") " pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.165532 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2tlg\" (UniqueName: \"kubernetes.io/projected/93ff580d-18b2-4e1f-af0b-f2bd36b1e0db-kube-api-access-p2tlg\") pod \"octavia-operator-controller-manager-d5fb87cb8-8z6z7\" (UID: \"93ff580d-18b2-4e1f-af0b-f2bd36b1e0db\") " pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.166242 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.171747 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-v5tkv"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.172239 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.211102 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2tlg\" (UniqueName: \"kubernetes.io/projected/93ff580d-18b2-4e1f-af0b-f2bd36b1e0db-kube-api-access-p2tlg\") pod \"octavia-operator-controller-manager-d5fb87cb8-8z6z7\" (UID: \"93ff580d-18b2-4e1f-af0b-f2bd36b1e0db\") " pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.211829 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.212953 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.213440 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.217884 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-2hmf8"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.258840 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.271745 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zj2cl\" (UniqueName: \"kubernetes.io/projected/c8dc257d-bb42-43c0-b784-483ccb97f95f-kube-api-access-zj2cl\") pod \"placement-operator-controller-manager-867d87977b-kpccv\" (UID: \"c8dc257d-bb42-43c0-b784-483ccb97f95f\") " pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.271855 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt2np\" (UniqueName: \"kubernetes.io/projected/50fe2539-78c5-4fde-9554-30143fdc520f-kube-api-access-zt2np\") pod \"openstack-baremetal-operator-controller-manager-77868f484-n5tnz\" (UID: \"50fe2539-78c5-4fde-9554-30143fdc520f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.271885 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmnsr\" (UniqueName: \"kubernetes.io/projected/d539442d-27c5-4383-9a11-589905951e21-kube-api-access-wmnsr\") pod \"ovn-operator-controller-manager-5b67cfc8fb-j9r8v\" (UID: \"d539442d-27c5-4383-9a11-589905951e21\") " pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.271948 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50fe2539-78c5-4fde-9554-30143fdc520f-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-n5tnz\" (UID: \"50fe2539-78c5-4fde-9554-30143fdc520f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.272037 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqrlh\" (UniqueName: \"kubernetes.io/projected/29996641-038c-4bb4-8ed8-4cc853ab4369-kube-api-access-tqrlh\") pod \"swift-operator-controller-manager-8f6687c44-lkxqf\" (UID: \"29996641-038c-4bb4-8ed8-4cc853ab4369\") " pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"
Dec 02 16:59:27 crc kubenswrapper[4747]: E1202 16:59:27.272919 4747 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 02 16:59:27 crc kubenswrapper[4747]: E1202 16:59:27.272984 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50fe2539-78c5-4fde-9554-30143fdc520f-cert podName:50fe2539-78c5-4fde-9554-30143fdc520f nodeName:}" failed. No retries permitted until 2025-12-02 16:59:27.772964336 +0000 UTC m=+998.299853085 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50fe2539-78c5-4fde-9554-30143fdc520f-cert") pod "openstack-baremetal-operator-controller-manager-77868f484-n5tnz" (UID: "50fe2539-78c5-4fde-9554-30143fdc520f") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.314768 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmnsr\" (UniqueName: \"kubernetes.io/projected/d539442d-27c5-4383-9a11-589905951e21-kube-api-access-wmnsr\") pod \"ovn-operator-controller-manager-5b67cfc8fb-j9r8v\" (UID: \"d539442d-27c5-4383-9a11-589905951e21\") " pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.327939 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt2np\" (UniqueName: \"kubernetes.io/projected/50fe2539-78c5-4fde-9554-30143fdc520f-kube-api-access-zt2np\") pod \"openstack-baremetal-operator-controller-manager-77868f484-n5tnz\" (UID: \"50fe2539-78c5-4fde-9554-30143fdc520f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.328027 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.328747 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zj2cl\" (UniqueName: \"kubernetes.io/projected/c8dc257d-bb42-43c0-b784-483ccb97f95f-kube-api-access-zj2cl\") pod \"placement-operator-controller-manager-867d87977b-kpccv\" (UID: \"c8dc257d-bb42-43c0-b784-483ccb97f95f\") " pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.337203 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.346706 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.378671 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.380595 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxlc5\" (UniqueName: \"kubernetes.io/projected/6ea9539a-d252-4870-bdbf-4bc6d033840c-kube-api-access-fxlc5\") pod \"telemetry-operator-controller-manager-695797c565-mwsvx\" (UID: \"6ea9539a-d252-4870-bdbf-4bc6d033840c\") " pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.380630 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqrlh\" (UniqueName: \"kubernetes.io/projected/29996641-038c-4bb4-8ed8-4cc853ab4369-kube-api-access-tqrlh\") pod \"swift-operator-controller-manager-8f6687c44-lkxqf\" (UID: \"29996641-038c-4bb4-8ed8-4cc853ab4369\") " pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.390966 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.392243 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.445075 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.449317 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-dzpsh"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.535156 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgph5\" (UniqueName: \"kubernetes.io/projected/e1603c18-0e89-40a6-bb71-549cd8db07c6-kube-api-access-kgph5\") pod \"test-operator-controller-manager-bb86466d8-zzcts\" (UID: \"e1603c18-0e89-40a6-bb71-549cd8db07c6\") " pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.535240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxlc5\" (UniqueName: \"kubernetes.io/projected/6ea9539a-d252-4870-bdbf-4bc6d033840c-kube-api-access-fxlc5\") pod \"telemetry-operator-controller-manager-695797c565-mwsvx\" (UID: \"6ea9539a-d252-4870-bdbf-4bc6d033840c\") " pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.535353 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x98l7\" (UniqueName: \"kubernetes.io/projected/1183ccf7-36cd-41ee-96d7-cb7272989af0-kube-api-access-x98l7\") pod \"watcher-operator-controller-manager-6b56b8849f-swpq5\" (UID: \"1183ccf7-36cd-41ee-96d7-cb7272989af0\") " pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.536424 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-5cmph"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.537789 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqrlh\" (UniqueName: \"kubernetes.io/projected/29996641-038c-4bb4-8ed8-4cc853ab4369-kube-api-access-tqrlh\") pod \"swift-operator-controller-manager-8f6687c44-lkxqf\" (UID: \"29996641-038c-4bb4-8ed8-4cc853ab4369\") " pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.537840 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.581393 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.581688 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.622161 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.706948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxlc5\" (UniqueName: \"kubernetes.io/projected/6ea9539a-d252-4870-bdbf-4bc6d033840c-kube-api-access-fxlc5\") pod \"telemetry-operator-controller-manager-695797c565-mwsvx\" (UID: \"6ea9539a-d252-4870-bdbf-4bc6d033840c\") " pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.716380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgph5\" (UniqueName: \"kubernetes.io/projected/e1603c18-0e89-40a6-bb71-549cd8db07c6-kube-api-access-kgph5\") pod \"test-operator-controller-manager-bb86466d8-zzcts\" (UID: \"e1603c18-0e89-40a6-bb71-549cd8db07c6\") " pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.716485 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x98l7\" (UniqueName: \"kubernetes.io/projected/1183ccf7-36cd-41ee-96d7-cb7272989af0-kube-api-access-x98l7\") pod \"watcher-operator-controller-manager-6b56b8849f-swpq5\" (UID: \"1183ccf7-36cd-41ee-96d7-cb7272989af0\") " pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.735397 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.737794 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.742730 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.742752 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-77lpz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.756690 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.776331 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgph5\" (UniqueName: \"kubernetes.io/projected/e1603c18-0e89-40a6-bb71-549cd8db07c6-kube-api-access-kgph5\") pod \"test-operator-controller-manager-bb86466d8-zzcts\" (UID: \"e1603c18-0e89-40a6-bb71-549cd8db07c6\") " pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.779761 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x98l7\" (UniqueName: \"kubernetes.io/projected/1183ccf7-36cd-41ee-96d7-cb7272989af0-kube-api-access-x98l7\") pod \"watcher-operator-controller-manager-6b56b8849f-swpq5\" (UID: \"1183ccf7-36cd-41ee-96d7-cb7272989af0\") " pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.824088 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50fe2539-78c5-4fde-9554-30143fdc520f-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-n5tnz\" (UID: \"50fe2539-78c5-4fde-9554-30143fdc520f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.845542 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50fe2539-78c5-4fde-9554-30143fdc520f-cert\") pod \"openstack-baremetal-operator-controller-manager-77868f484-n5tnz\" (UID: \"50fe2539-78c5-4fde-9554-30143fdc520f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.847930 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.848804 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.848923 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.854098 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.862914 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-x4xpf"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.875091 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.891995 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.898245 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf"]
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.929433 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/882ad3dc-b8df-4107-855a-bdf7dc10786b-cert\") pod \"openstack-operator-controller-manager-78568b558-x86lp\" (UID: \"882ad3dc-b8df-4107-855a-bdf7dc10786b\") " pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.929774 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl2sv\" (UniqueName: \"kubernetes.io/projected/882ad3dc-b8df-4107-855a-bdf7dc10786b-kube-api-access-vl2sv\") pod \"openstack-operator-controller-manager-78568b558-x86lp\" (UID: \"882ad3dc-b8df-4107-855a-bdf7dc10786b\") " pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.929922 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzssx\" (UniqueName: \"kubernetes.io/projected/75bdef4f-9dfa-4699-9cea-b2804869c8ef-kube-api-access-kzssx\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg\" (UID: \"75bdef4f-9dfa-4699-9cea-b2804869c8ef\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"
Dec 02 16:59:27 crc kubenswrapper[4747]: I1202 16:59:27.929593 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.004485 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.032143 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/882ad3dc-b8df-4107-855a-bdf7dc10786b-cert\") pod \"openstack-operator-controller-manager-78568b558-x86lp\" (UID: \"882ad3dc-b8df-4107-855a-bdf7dc10786b\") " pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.032259 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl2sv\" (UniqueName: \"kubernetes.io/projected/882ad3dc-b8df-4107-855a-bdf7dc10786b-kube-api-access-vl2sv\") pod \"openstack-operator-controller-manager-78568b558-x86lp\" (UID: \"882ad3dc-b8df-4107-855a-bdf7dc10786b\") " pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.032299 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzssx\" (UniqueName: \"kubernetes.io/projected/75bdef4f-9dfa-4699-9cea-b2804869c8ef-kube-api-access-kzssx\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg\" (UID: \"75bdef4f-9dfa-4699-9cea-b2804869c8ef\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"
Dec 02 16:59:28 crc kubenswrapper[4747]: E1202 16:59:28.033013 4747 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 02 16:59:28 crc kubenswrapper[4747]: E1202 16:59:28.033118 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/882ad3dc-b8df-4107-855a-bdf7dc10786b-cert podName:882ad3dc-b8df-4107-855a-bdf7dc10786b nodeName:}" failed. No retries permitted until 2025-12-02 16:59:28.533095362 +0000 UTC m=+999.059984111 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/882ad3dc-b8df-4107-855a-bdf7dc10786b-cert") pod "openstack-operator-controller-manager-78568b558-x86lp" (UID: "882ad3dc-b8df-4107-855a-bdf7dc10786b") : secret "webhook-server-cert" not found
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.060232 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzssx\" (UniqueName: \"kubernetes.io/projected/75bdef4f-9dfa-4699-9cea-b2804869c8ef-kube-api-access-kzssx\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg\" (UID: \"75bdef4f-9dfa-4699-9cea-b2804869c8ef\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.065532 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.075695 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl2sv\" (UniqueName: \"kubernetes.io/projected/882ad3dc-b8df-4107-855a-bdf7dc10786b-kube-api-access-vl2sv\") pod \"openstack-operator-controller-manager-78568b558-x86lp\" (UID: \"882ad3dc-b8df-4107-855a-bdf7dc10786b\") " pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.273235 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl" event={"ID":"aac0c240-30e1-410d-bab3-b87965dbd297","Type":"ContainerStarted","Data":"e1fc05e021176fd22045f668fd624521fd257272f9f1b79f92169377922e3ff3"}
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.292410 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.304210 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf" event={"ID":"84349f11-8427-4aa8-ae24-750a6fdc5e78","Type":"ContainerStarted","Data":"0468fcbe5f4bb6e353533b6bbb1a3720a34b94994efb38ef7dccf5ed4038be91"}
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.320579 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z" event={"ID":"02a1a6c9-a064-447d-85b3-61d6de6bba1a","Type":"ContainerStarted","Data":"6154e331391736de79d49aea1a84a35e51771e68e5c8f6bb6a7fb703387834e8"}
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.580672 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/882ad3dc-b8df-4107-855a-bdf7dc10786b-cert\") pod \"openstack-operator-controller-manager-78568b558-x86lp\" (UID: \"882ad3dc-b8df-4107-855a-bdf7dc10786b\") " pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.604214 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/882ad3dc-b8df-4107-855a-bdf7dc10786b-cert\") pod \"openstack-operator-controller-manager-78568b558-x86lp\" (UID: \"882ad3dc-b8df-4107-855a-bdf7dc10786b\") " pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.844876 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7"]
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.863967 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn"]
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.870199 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8"]
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.870659 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.878508 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj"]
Dec 02 16:59:28 crc kubenswrapper[4747]: W1202 16:59:28.885226 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a8a4d9e_ee5e_4235_bae4_23eb196dac78.slice/crio-0916d2fc3756aaf36c8731a6eceb9fd67b0962f8e46e57b655e4fa7deaf64514 WatchSource:0}: Error finding container 0916d2fc3756aaf36c8731a6eceb9fd67b0962f8e46e57b655e4fa7deaf64514: Status 404 returned error can't find the container with id 0916d2fc3756aaf36c8731a6eceb9fd67b0962f8e46e57b655e4fa7deaf64514
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.912062 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"]
Dec 02 16:59:28 crc kubenswrapper[4747]: I1202 16:59:28.919546 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp"]
Dec 02 16:59:28 crc kubenswrapper[4747]: W1202 16:59:28.920896 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1871043_c496_421e_8055_817652748d46.slice/crio-058bfe6f537e237f656e317857155c8eed81f53addb592f988b80a6d479c667f WatchSource:0}: Error finding container 058bfe6f537e237f656e317857155c8eed81f53addb592f988b80a6d479c667f: Status 404 returned error can't find the container with id 058bfe6f537e237f656e317857155c8eed81f53addb592f988b80a6d479c667f
Dec 02 16:59:28 crc kubenswrapper[4747]: W1202 16:59:28.932497 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb160d05_1b0b_4b75_ad5f_8c9efd2dbbbb.slice/crio-9ce9a5ed4673c32a717377bc38cca8fe4ba2de896feeffb2092c7233c7d5a0cf WatchSource:0}: Error finding container 9ce9a5ed4673c32a717377bc38cca8fe4ba2de896feeffb2092c7233c7d5a0cf: Status 404 returned error can't find the container with id 9ce9a5ed4673c32a717377bc38cca8fe4ba2de896feeffb2092c7233c7d5a0cf
Dec 02 16:59:28 crc kubenswrapper[4747]: W1202 16:59:28.937465 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc637ccff_ec15_453e_9d0b_1e9d013f5f60.slice/crio-d6769a9fac9b8f576f978d6a31dc5972805c63a79ca8faff1b53c9136e526c84 WatchSource:0}: Error finding container d6769a9fac9b8f576f978d6a31dc5972805c63a79ca8faff1b53c9136e526c84: Status 404 returned error can't find the container with id d6769a9fac9b8f576f978d6a31dc5972805c63a79ca8faff1b53c9136e526c84
Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.342930 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7"]
Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.365806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp" event={"ID":"c637ccff-ec15-453e-9d0b-1e9d013f5f60","Type":"ContainerStarted","Data":"d6769a9fac9b8f576f978d6a31dc5972805c63a79ca8faff1b53c9136e526c84"}
Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.371450 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api"
pods=["openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.388564 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.389931 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" event={"ID":"1a8a4d9e-ee5e-4235-bae4-23eb196dac78","Type":"ContainerStarted","Data":"0916d2fc3756aaf36c8731a6eceb9fd67b0962f8e46e57b655e4fa7deaf64514"} Dec 02 16:59:29 crc kubenswrapper[4747]: W1202 16:59:29.390554 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93ff580d_18b2_4e1f_af0b_f2bd36b1e0db.slice/crio-7944c1565669a88e5076224ad3cf9b53d51186bd176dc6408d8cbf4109d22f44 WatchSource:0}: Error finding container 7944c1565669a88e5076224ad3cf9b53d51186bd176dc6408d8cbf4109d22f44: Status 404 returned error can't find the container with id 7944c1565669a88e5076224ad3cf9b53d51186bd176dc6408d8cbf4109d22f44 Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.391393 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g" event={"ID":"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb","Type":"ContainerStarted","Data":"9ce9a5ed4673c32a717377bc38cca8fe4ba2de896feeffb2092c7233c7d5a0cf"} Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.394181 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn" event={"ID":"3f797f20-b787-4429-a862-badf66ed38ea","Type":"ContainerStarted","Data":"f527047749c7a747ec2d64e910a104a83f6245ecaee2cc7819becdd85ca53f13"} Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.410878 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.439759 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" event={"ID":"b1871043-c496-421e-8055-817652748d46","Type":"ContainerStarted","Data":"058bfe6f537e237f656e317857155c8eed81f53addb592f988b80a6d479c667f"} Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.461741 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.476826 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7" event={"ID":"3685e169-7bca-47ae-b5bc-5945db4fa054","Type":"ContainerStarted","Data":"12f2233f7b46d664447ba7581d93ec02b2520b60ad721d78cd8308599c4124dd"} Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.485392 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:f076b8d9e85881d9c3cb5272b13db7f5e05d2e9da884c17b677a844112831907,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tqrlh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-8f6687c44-lkxqf_openstack-operators(29996641-038c-4bb4-8ed8-4cc853ab4369): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.504848 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.517556 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf"] Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.522825 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kgph5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-bb86466d8-zzcts_openstack-operators(e1603c18-0e89-40a6-bb71-549cd8db07c6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.523330 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kzssx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg_openstack-operators(75bdef4f-9dfa-4699-9cea-b2804869c8ef): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.523814 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:fd917de0cf800ec284ee0c3f2906a06d85ea18cb75a5b06c8eb305750467986d,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zj2cl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-867d87977b-kpccv_openstack-operators(c8dc257d-bb42-43c0-b784-483ccb97f95f): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.524475 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" podUID="75bdef4f-9dfa-4699-9cea-b2804869c8ef" Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.545871 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-58879495c-wg54q"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.564375 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh"] Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.566070 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:1463c43243c75f56609cbae6bee2f86d411107181775721cb097cbd22fcae1d1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z7smt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-58879495c-wg54q_openstack-operators(f5edfca9-f892-409d-856c-70e757072464): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.567148 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:78d91c3cdd5eda41c2cd6d4a8491844e161dc33f6221be8cb822b2107d7ff46f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fxlc5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-695797c565-mwsvx_openstack-operators(6ea9539a-d252-4870-bdbf-4bc6d033840c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.576077 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.592960 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.604011 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-78568b558-x86lp"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.616165 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-bb86466d8-zzcts"] Dec 02 16:59:29 crc kubenswrapper[4747]: I1202 16:59:29.625302 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx"] Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.856920 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull 
QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" podUID="29996641-038c-4bb4-8ed8-4cc853ab4369" Dec 02 16:59:29 crc kubenswrapper[4747]: E1202 16:59:29.906563 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" podUID="e1603c18-0e89-40a6-bb71-549cd8db07c6" Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.230449 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" podUID="6ea9539a-d252-4870-bdbf-4bc6d033840c" Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.260503 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" podUID="f5edfca9-f892-409d-856c-70e757072464" Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.320580 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" podUID="c8dc257d-bb42-43c0-b784-483ccb97f95f" Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.513408 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm" event={"ID":"11014c8f-0b84-470f-aaf8-0d029800d594","Type":"ContainerStarted","Data":"242e6321ab9f52284f621ab80ef8e6074c62572433375cbab8d8f695e44440bf"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.520164 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" event={"ID":"c8dc257d-bb42-43c0-b784-483ccb97f95f","Type":"ContainerStarted","Data":"06334fc35529fbdf3207ee8a4fb2d6462af4de579e07af3b11ca45b4abbd3611"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.520212 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" event={"ID":"c8dc257d-bb42-43c0-b784-483ccb97f95f","Type":"ContainerStarted","Data":"c3d50ac1790737217839d8c618884598db961f23e7ff2ff5a4b8c70eca0f7ac0"} Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.534621 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:fd917de0cf800ec284ee0c3f2906a06d85ea18cb75a5b06c8eb305750467986d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" podUID="c8dc257d-bb42-43c0-b784-483ccb97f95f" Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.535112 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz" event={"ID":"50fe2539-78c5-4fde-9554-30143fdc520f","Type":"ContainerStarted","Data":"fbba987356b6b1f1d7169b47022d9df75976a4049f69c2d96d80f215c9506b9d"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.550731 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp" event={"ID":"882ad3dc-b8df-4107-855a-bdf7dc10786b","Type":"ContainerStarted","Data":"dfb33b40b2c704f6df0e9b46dc50431142681987bd4b4db554f2765a304646b5"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.550833 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp" event={"ID":"882ad3dc-b8df-4107-855a-bdf7dc10786b","Type":"ContainerStarted","Data":"16dbbece17d2b915b9bd54eabb501500a09696ad68fe666baa5e21914cb174b1"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.557675 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh" event={"ID":"dff222d7-fd30-4d3a-839a-6478da00ef65","Type":"ContainerStarted","Data":"d9413b324376fa1a4551b2e3ac8158432c20e8061efea6b438c17aec631edd14"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.591453 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5" event={"ID":"1183ccf7-36cd-41ee-96d7-cb7272989af0","Type":"ContainerStarted","Data":"5c9ccba51671ceb330f9e0b0f7fb50451ccb35ca8df7528ac1de1e18a9394ebc"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.646391 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" event={"ID":"29996641-038c-4bb4-8ed8-4cc853ab4369","Type":"ContainerStarted","Data":"a886527f2e4234d06447ae0929c6b660d1ff1bb8fc85a856fd639b7480d86673"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.646451 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" event={"ID":"29996641-038c-4bb4-8ed8-4cc853ab4369","Type":"ContainerStarted","Data":"ae431e0baaae67220476e19cdb04a4e900d4eff576e0e291686745f1fd9ad11a"} Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.661760 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:f076b8d9e85881d9c3cb5272b13db7f5e05d2e9da884c17b677a844112831907\\\"\"" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" podUID="29996641-038c-4bb4-8ed8-4cc853ab4369" Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.677625 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" event={"ID":"75bdef4f-9dfa-4699-9cea-b2804869c8ef","Type":"ContainerStarted","Data":"9d4a4b4d614581f5d8a3240eb14b17cd12b897fdfbb6f6d3e40c7c8c8f96a13a"} Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.683976 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" podUID="75bdef4f-9dfa-4699-9cea-b2804869c8ef" Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.709404 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c" 
event={"ID":"3e91b7b4-9289-4769-83a7-4cd35038aaad","Type":"ContainerStarted","Data":"2e2bd31f86ec404c33b9364d7f50573bcfece33e1872aced1a97055c07b1cc4e"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.761373 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" event={"ID":"f5edfca9-f892-409d-856c-70e757072464","Type":"ContainerStarted","Data":"aa4ec4d83bff0eeff25c5c48e910f60d8d95eccde1a0bd388fefc176476f2cc9"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.761449 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" event={"ID":"f5edfca9-f892-409d-856c-70e757072464","Type":"ContainerStarted","Data":"8c386214b1f698551f0848fadbc12c527be8d39d0fd2908ff95dc4510dbb3828"} Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.765604 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:1463c43243c75f56609cbae6bee2f86d411107181775721cb097cbd22fcae1d1\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" podUID="f5edfca9-f892-409d-856c-70e757072464" Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.777241 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" event={"ID":"d539442d-27c5-4383-9a11-589905951e21","Type":"ContainerStarted","Data":"a69a8a17ca0b7dc21057f9167aff87d61d3a0760c9ee628d532f34ff2c3ee0ef"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.807371 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7" event={"ID":"93ff580d-18b2-4e1f-af0b-f2bd36b1e0db","Type":"ContainerStarted","Data":"7944c1565669a88e5076224ad3cf9b53d51186bd176dc6408d8cbf4109d22f44"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.810156 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" event={"ID":"6ea9539a-d252-4870-bdbf-4bc6d033840c","Type":"ContainerStarted","Data":"241cfdf448098d1c3349ed437dc17ed6bf40ab6b105272c795b0091ff4418790"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.810190 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" event={"ID":"6ea9539a-d252-4870-bdbf-4bc6d033840c","Type":"ContainerStarted","Data":"e625035c0e913ecea1bd3200d3aa85e47b9337bbe5e9ce7d8ed712b1da7d3757"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.832122 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" event={"ID":"e1603c18-0e89-40a6-bb71-549cd8db07c6","Type":"ContainerStarted","Data":"a462844c3b9eee4c17a86db9a8035d3a224a9abcb91e6f59cc985a014acd77a3"} Dec 02 16:59:30 crc kubenswrapper[4747]: I1202 16:59:30.832193 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" event={"ID":"e1603c18-0e89-40a6-bb71-549cd8db07c6","Type":"ContainerStarted","Data":"ac113d2bd0d8ab67d28433a274f74dbd195f67ce187f99161d2ff94c450c04b4"} Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.832679 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:78d91c3cdd5eda41c2cd6d4a8491844e161dc33f6221be8cb822b2107d7ff46f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" podUID="6ea9539a-d252-4870-bdbf-4bc6d033840c" Dec 02 16:59:30 crc kubenswrapper[4747]: E1202 16:59:30.838729 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391\\\"\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" podUID="e1603c18-0e89-40a6-bb71-549cd8db07c6" Dec 02 16:59:31 crc kubenswrapper[4747]: I1202 16:59:31.795930 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 16:59:31 crc kubenswrapper[4747]: I1202 16:59:31.796368 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 16:59:31 crc kubenswrapper[4747]: I1202 16:59:31.848644 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp" event={"ID":"882ad3dc-b8df-4107-855a-bdf7dc10786b","Type":"ContainerStarted","Data":"7d1ca99aba3bad335311dacc21022545bbb3d828eb138e4ac1311b83bb50cacc"} Dec 02 16:59:31 crc kubenswrapper[4747]: I1202 16:59:31.848729 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp" Dec 02 16:59:31 crc kubenswrapper[4747]: E1202 16:59:31.854996 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:fd917de0cf800ec284ee0c3f2906a06d85ea18cb75a5b06c8eb305750467986d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" podUID="c8dc257d-bb42-43c0-b784-483ccb97f95f" Dec 02 16:59:31 crc kubenswrapper[4747]: E1202 16:59:31.855038 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" podUID="75bdef4f-9dfa-4699-9cea-b2804869c8ef" Dec 02 16:59:31 crc kubenswrapper[4747]: E1202 16:59:31.855088 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:78d91c3cdd5eda41c2cd6d4a8491844e161dc33f6221be8cb822b2107d7ff46f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" 
podUID="6ea9539a-d252-4870-bdbf-4bc6d033840c" Dec 02 16:59:31 crc kubenswrapper[4747]: E1202 16:59:31.855209 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:f076b8d9e85881d9c3cb5272b13db7f5e05d2e9da884c17b677a844112831907\\\"\"" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" podUID="29996641-038c-4bb4-8ed8-4cc853ab4369" Dec 02 16:59:31 crc kubenswrapper[4747]: E1202 16:59:31.855213 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:1463c43243c75f56609cbae6bee2f86d411107181775721cb097cbd22fcae1d1\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" podUID="f5edfca9-f892-409d-856c-70e757072464" Dec 02 16:59:31 crc kubenswrapper[4747]: E1202 16:59:31.857024 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391\\\"\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" podUID="e1603c18-0e89-40a6-bb71-549cd8db07c6" Dec 02 16:59:31 crc kubenswrapper[4747]: I1202 16:59:31.965334 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp" podStartSLOduration=5.965310131 podStartE2EDuration="5.965310131s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 16:59:31.96176743 +0000 UTC m=+1002.488656179" watchObservedRunningTime="2025-12-02 16:59:31.965310131 +0000 UTC m=+1002.492198880" Dec 02 16:59:38 crc kubenswrapper[4747]: I1202 16:59:38.878538 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-78568b558-x86lp" Dec 02 16:59:42 crc kubenswrapper[4747]: E1202 16:59:42.964235 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:2811f492f5663ec8660767dcb699060691c10dd809b1bb5f3a1f6b803946a653" Dec 02 16:59:42 crc kubenswrapper[4747]: E1202 16:59:42.965255 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:2811f492f5663ec8660767dcb699060691c10dd809b1bb5f3a1f6b803946a653,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4ws9k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-7d5d9fd47f-dhznh_openstack-operators(dff222d7-fd30-4d3a-839a-6478da00ef65): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 16:59:45 crc kubenswrapper[4747]: E1202 16:59:45.434984 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:1988aaf9cd245150cda123aaaa21718ccb552c47f1623b7d68804f13c47f2c6a" Dec 02 16:59:45 crc kubenswrapper[4747]: E1202 16:59:45.435573 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:1988aaf9cd245150cda123aaaa21718ccb552c47f1623b7d68804f13c47f2c6a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x98l7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6b56b8849f-swpq5_openstack-operators(1183ccf7-36cd-41ee-96d7-cb7272989af0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 16:59:46 crc kubenswrapper[4747]: E1202 16:59:46.089226 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:9aee58b2ca71ef9c4f12373090951090d13aa7038d0fef07ec30167f3d6ae23c" Dec 02 16:59:46 crc kubenswrapper[4747]: E1202 16:59:46.089540 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:9aee58b2ca71ef9c4f12373090951090d13aa7038d0fef07ec30167f3d6ae23c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dnz2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-54485f899-qjnsp_openstack-operators(c637ccff-ec15-453e-9d0b-1e9d013f5f60): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 16:59:46 crc kubenswrapper[4747]: E1202 16:59:46.917505 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:4f799c74da2f1c864af24fcd5efd91ec64848972a95246eac6b5c6c4d71c1756" Dec 02 16:59:46 crc kubenswrapper[4747]: E1202 16:59:46.917816 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:4f799c74da2f1c864af24fcd5efd91ec64848972a95246eac6b5c6c4d71c1756,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rvz4l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-79cc9d59f5-ftf2c_openstack-operators(3e91b7b4-9289-4769-83a7-4cd35038aaad): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 16:59:47 crc kubenswrapper[4747]: E1202 16:59:47.659645 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:c5394efcfaeddc4231f98f1ed5267b77a8687038064cfb4302bcd0c8d6587856" Dec 02 16:59:47 crc kubenswrapper[4747]: E1202 16:59:47.660298 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:c5394efcfaeddc4231f98f1ed5267b77a8687038064cfb4302bcd0c8d6587856,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f6w9p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-5bfbbb859d-5b75z_openstack-operators(02a1a6c9-a064-447d-85b3-61d6de6bba1a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:48 crc kubenswrapper[4747]: E1202 16:59:48.320080 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:66928f0eae5206f671ac7b21f79953e37009c54187d768dc6e03fe0a3d202b3b"
Dec 02 16:59:48 crc kubenswrapper[4747]: E1202 16:59:48.320778 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:66928f0eae5206f671ac7b21f79953e37009c54187d768dc6e03fe0a3d202b3b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zt2np,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-77868f484-n5tnz_openstack-operators(50fe2539-78c5-4fde-9554-30143fdc520f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:56 crc kubenswrapper[4747]: E1202 16:59:56.597297 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:8aaaf8bb0a81358ee196af922d534c9b3f6bb47b27f4283087f7e0254638a671"
Dec 02 16:59:56 crc kubenswrapper[4747]: E1202 16:59:56.598067 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:8aaaf8bb0a81358ee196af922d534c9b3f6bb47b27f4283087f7e0254638a671,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t9npg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-6788cc6d75-nsvrj_openstack-operators(b1871043-c496-421e-8055-817652748d46): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:57 crc kubenswrapper[4747]: E1202 16:59:57.178655 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:2c837009de6475bc22534827c03df6d8649277b71f1c30de2087b6c52aafb326"
Dec 02 16:59:57 crc kubenswrapper[4747]: E1202 16:59:57.178996 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:2c837009de6475bc22534827c03df6d8649277b71f1c30de2087b6c52aafb326,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wmnsr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-5b67cfc8fb-j9r8v_openstack-operators(d539442d-27c5-4383-9a11-589905951e21): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:57 crc kubenswrapper[4747]: E1202 16:59:57.743259 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31"
Dec 02 16:59:57 crc kubenswrapper[4747]: E1202 16:59:57.743524 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nthcr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-85fbd69fcd-ns9p8_openstack-operators(1a8a4d9e-ee5e-4235-bae4-23eb196dac78): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.100816 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:1463c43243c75f56609cbae6bee2f86d411107181775721cb097cbd22fcae1d1"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.101335 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:1463c43243c75f56609cbae6bee2f86d411107181775721cb097cbd22fcae1d1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z7smt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-58879495c-wg54q_openstack-operators(f5edfca9-f892-409d-856c-70e757072464): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.102681 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" podUID="f5edfca9-f892-409d-856c-70e757072464"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.512524 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:78d91c3cdd5eda41c2cd6d4a8491844e161dc33f6221be8cb822b2107d7ff46f"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.512798 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:78d91c3cdd5eda41c2cd6d4a8491844e161dc33f6221be8cb822b2107d7ff46f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fxlc5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-695797c565-mwsvx_openstack-operators(6ea9539a-d252-4870-bdbf-4bc6d033840c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.514092 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" podUID="6ea9539a-d252-4870-bdbf-4bc6d033840c"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.905487 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.906046 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kgph5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-bb86466d8-zzcts_openstack-operators(e1603c18-0e89-40a6-bb71-549cd8db07c6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 16:59:59 crc kubenswrapper[4747]: E1202 16:59:59.907241 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" podUID="e1603c18-0e89-40a6-bb71-549cd8db07c6"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.169046 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"]
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.170610 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.174006 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.174544 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.177788 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"]
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.282015 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdtsc\" (UniqueName: \"kubernetes.io/projected/63ec13db-3dc2-442f-80ce-17fd41c3600b-kube-api-access-fdtsc\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.282263 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/63ec13db-3dc2-442f-80ce-17fd41c3600b-secret-volume\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.282449 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/63ec13db-3dc2-442f-80ce-17fd41c3600b-config-volume\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.384807 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdtsc\" (UniqueName: \"kubernetes.io/projected/63ec13db-3dc2-442f-80ce-17fd41c3600b-kube-api-access-fdtsc\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.384937 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/63ec13db-3dc2-442f-80ce-17fd41c3600b-secret-volume\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.384991 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/63ec13db-3dc2-442f-80ce-17fd41c3600b-config-volume\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.386571 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/63ec13db-3dc2-442f-80ce-17fd41c3600b-config-volume\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.396982 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/63ec13db-3dc2-442f-80ce-17fd41c3600b-secret-volume\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.403816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdtsc\" (UniqueName: \"kubernetes.io/projected/63ec13db-3dc2-442f-80ce-17fd41c3600b-kube-api-access-fdtsc\") pod \"collect-profiles-29411580-9fczk\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: I1202 17:00:00.503591 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"
Dec 02 17:00:00 crc kubenswrapper[4747]: E1202 17:00:00.521928 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:f076b8d9e85881d9c3cb5272b13db7f5e05d2e9da884c17b677a844112831907"
Dec 02 17:00:00 crc kubenswrapper[4747]: E1202 17:00:00.522211 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:f076b8d9e85881d9c3cb5272b13db7f5e05d2e9da884c17b677a844112831907,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tqrlh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-8f6687c44-lkxqf_openstack-operators(29996641-038c-4bb4-8ed8-4cc853ab4369): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 17:00:00 crc kubenswrapper[4747]: E1202 17:00:00.523404 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" podUID="29996641-038c-4bb4-8ed8-4cc853ab4369"
Dec 02 17:00:01 crc kubenswrapper[4747]: I1202 17:00:01.795642 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:00:01 crc kubenswrapper[4747]: I1202 17:00:01.796261 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:00:02 crc kubenswrapper[4747]: E1202 17:00:02.270523 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
Dec 02 17:00:02 crc kubenswrapper[4747]: E1202 17:00:02.270817 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kzssx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg_openstack-operators(75bdef4f-9dfa-4699-9cea-b2804869c8ef): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 17:00:02 crc kubenswrapper[4747]: E1202 17:00:02.273138 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" podUID="75bdef4f-9dfa-4699-9cea-b2804869c8ef"
Dec 02 17:00:02 crc kubenswrapper[4747]: I1202 17:00:02.942009 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"]
Dec 02 17:00:02 crc kubenswrapper[4747]: W1202 17:00:02.951585 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63ec13db_3dc2_442f_80ce_17fd41c3600b.slice/crio-dbbf1907c6d080fc3aa5843b0a66c4d3c09b21497d2b9ac4e40ae488645afc72 WatchSource:0}: Error finding container dbbf1907c6d080fc3aa5843b0a66c4d3c09b21497d2b9ac4e40ae488645afc72: Status 404 returned error can't find the container with id dbbf1907c6d080fc3aa5843b0a66c4d3c09b21497d2b9ac4e40ae488645afc72
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.264753 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp" podUID="c637ccff-ec15-453e-9d0b-1e9d013f5f60"
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.266709 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz" podUID="50fe2539-78c5-4fde-9554-30143fdc520f"
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.266932 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c" podUID="3e91b7b4-9289-4769-83a7-4cd35038aaad"
Dec 02 17:00:03 crc kubenswrapper[4747]: I1202 17:00:03.288875 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz" event={"ID":"50fe2539-78c5-4fde-9554-30143fdc520f","Type":"ContainerStarted","Data":"0a26ff2d14b9b4d05aeeaa63098a59af07f460913d871eb4c759cca3a68f27c1"}
Dec 02 17:00:03 crc kubenswrapper[4747]: I1202 17:00:03.298529 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z" event={"ID":"02a1a6c9-a064-447d-85b3-61d6de6bba1a","Type":"ContainerStarted","Data":"7ab73e96e3fd4e46d2d166a602538d21171e5a8231061e62c7fc3553dc1eb1e9"}
Dec 02 17:00:03 crc kubenswrapper[4747]: I1202 17:00:03.302668 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk" event={"ID":"63ec13db-3dc2-442f-80ce-17fd41c3600b","Type":"ContainerStarted","Data":"dbbf1907c6d080fc3aa5843b0a66c4d3c09b21497d2b9ac4e40ae488645afc72"}
Dec 02 17:00:03 crc kubenswrapper[4747]: I1202 17:00:03.306445 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c" event={"ID":"3e91b7b4-9289-4769-83a7-4cd35038aaad","Type":"ContainerStarted","Data":"48cb5fb696fb42641616f3fb42b6602604fae10512dbc8d338b994c5debcfe26"}
Dec 02 17:00:03 crc kubenswrapper[4747]: I1202 17:00:03.326050 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7" event={"ID":"93ff580d-18b2-4e1f-af0b-f2bd36b1e0db","Type":"ContainerStarted","Data":"9a1e9e26beb9f6b30413d8c33e51ac8fa8d6ab70dc8340f6a219185b1edf2181"}
Dec 02 17:00:03 crc kubenswrapper[4747]: I1202 17:00:03.338520 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp" event={"ID":"c637ccff-ec15-453e-9d0b-1e9d013f5f60","Type":"ContainerStarted","Data":"21b3cc88feb90cda6446b3b4219ca2aedc457f0d626cdf63641568921d44ca60"}
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.421115 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" podUID="1a8a4d9e-ee5e-4235-bae4-23eb196dac78"
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.421352 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z" podUID="02a1a6c9-a064-447d-85b3-61d6de6bba1a"
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.421546 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5" podUID="1183ccf7-36cd-41ee-96d7-cb7272989af0"
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.422018 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" podUID="d539442d-27c5-4383-9a11-589905951e21"
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.422867 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh" podUID="dff222d7-fd30-4d3a-839a-6478da00ef65"
Dec 02 17:00:03 crc kubenswrapper[4747]: E1202 17:00:03.433260 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" podUID="b1871043-c496-421e-8055-817652748d46"
Dec 02 17:00:04 crc kubenswrapper[4747]: E1202 17:00:04.416001 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63ec13db_3dc2_442f_80ce_17fd41c3600b.slice/crio-conmon-56ab241443be764e8e83c8af3146b5a848140131a2693d3d0538f7febec8b29e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63ec13db_3dc2_442f_80ce_17fd41c3600b.slice/crio-56ab241443be764e8e83c8af3146b5a848140131a2693d3d0538f7febec8b29e.scope\": RecentStats: unable to find data in memory cache]"
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.562812 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" event={"ID":"b1871043-c496-421e-8055-817652748d46","Type":"ContainerStarted","Data":"d238b311cc0927650dcead56d3c952903976fc8840d5afa7234facaed4fa0d37"}
Dec 02 17:00:04 crc kubenswrapper[4747]: E1202 17:00:04.566074 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:8aaaf8bb0a81358ee196af922d534c9b3f6bb47b27f4283087f7e0254638a671\\\"\"" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" podUID="b1871043-c496-421e-8055-817652748d46"
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.576427 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" event={"ID":"1a8a4d9e-ee5e-4235-bae4-23eb196dac78","Type":"ContainerStarted","Data":"37c85a16a67c49006e1ddba257d2cd8bf93306f9e3f16cb50951d5af93ad44ce"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.594561 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" event={"ID":"c8dc257d-bb42-43c0-b784-483ccb97f95f","Type":"ContainerStarted","Data":"9346d25fd2d757a5b2f82dccc38fc11fc02414a7c36ef9fa38fdf6e7cd55806f"}
Dec 02 17:00:04 crc kubenswrapper[4747]: E1202 17:00:04.594886 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31\\\"\"" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" podUID="1a8a4d9e-ee5e-4235-bae4-23eb196dac78"
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.595439 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv"
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.608405 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g" event={"ID":"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb","Type":"ContainerStarted","Data":"a1279b88ffdc91b242d1bdb685ef1c846e22052a3d84b1cf3d889c2b154b82dc"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.609949 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm" event={"ID":"11014c8f-0b84-470f-aaf8-0d029800d594","Type":"ContainerStarted","Data":"a6fd51fb5e958026d8e24ee5fb2e5dad3d35958cd9a422c5a1138560e133b8dc"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.611081 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh" event={"ID":"dff222d7-fd30-4d3a-839a-6478da00ef65","Type":"ContainerStarted","Data":"6af4d06c9cf2732b5c71c589e49c2bc94b6cd1abb6c836f14dbc7ec7d9026b1a"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.624699 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf" event={"ID":"84349f11-8427-4aa8-ae24-750a6fdc5e78","Type":"ContainerStarted","Data":"70cadf07ebf0b4420863fad8f193f8f03d2a8cb312b109f092aab16038709126"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.627462 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn" event={"ID":"3f797f20-b787-4429-a862-badf66ed38ea","Type":"ContainerStarted","Data":"de6f90d244a91e0b2f70ea3bef647d0c786735f9cb583f67e1e628734038053c"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.631125 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl" event={"ID":"aac0c240-30e1-410d-bab3-b87965dbd297","Type":"ContainerStarted","Data":"cfcd7543b781ecc0f9493d2d82ec2db80ed3864cabd62257c634860b93b62887"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.633031 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5" event={"ID":"1183ccf7-36cd-41ee-96d7-cb7272989af0","Type":"ContainerStarted","Data":"774cda1182f2d01238e8165fbed3d33fc16fe98ba792084d345b9de5bf98cca3"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.639656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7" event={"ID":"3685e169-7bca-47ae-b5bc-5945db4fa054","Type":"ContainerStarted","Data":"c5bf199c40061f35c4fc7c28ec90f4e9ed70615d14b520f0b3343de817151c4c"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.642307 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" event={"ID":"d539442d-27c5-4383-9a11-589905951e21","Type":"ContainerStarted","Data":"a49541ac7ac23b67880bd2351c8db1df647a4e714b90bca686ce6bbc65cc01d1"}
Dec 02 17:00:04 crc kubenswrapper[4747]: E1202 17:00:04.644409 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:2c837009de6475bc22534827c03df6d8649277b71f1c30de2087b6c52aafb326\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" podUID="d539442d-27c5-4383-9a11-589905951e21"
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.645558 4747 generic.go:334] "Generic (PLEG): container finished" podID="63ec13db-3dc2-442f-80ce-17fd41c3600b" containerID="56ab241443be764e8e83c8af3146b5a848140131a2693d3d0538f7febec8b29e" exitCode=0
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.646346 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk" event={"ID":"63ec13db-3dc2-442f-80ce-17fd41c3600b","Type":"ContainerDied","Data":"56ab241443be764e8e83c8af3146b5a848140131a2693d3d0538f7febec8b29e"}
Dec 02 17:00:04 crc kubenswrapper[4747]: I1202 17:00:04.938335 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" podStartSLOduration=5.970818897 podStartE2EDuration="38.938301605s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.523267378 +0000 UTC m=+1000.050156127" lastFinishedPulling="2025-12-02 17:00:02.490750086 +0000 UTC m=+1033.017638835" observedRunningTime="2025-12-02 17:00:04.93634477 +0000 UTC m=+1035.463233519" watchObservedRunningTime="2025-12-02 17:00:04.938301605 +0000 UTC m=+1035.465190354"
Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.000333 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g"
Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.000380 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g" event={"ID":"eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb","Type":"ContainerStarted","Data":"429f2b5b5afd917b7592743968da4e2178663cd85a0a0103514e1c977c6f77eb"} Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.002159 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7" event={"ID":"3685e169-7bca-47ae-b5bc-5945db4fa054","Type":"ContainerStarted","Data":"54c819f956ed8aae7089d355ad2add002eb10b97e4f7642537ba6592567c0a9d"} Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.002633 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7" Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.005128 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp" event={"ID":"c637ccff-ec15-453e-9d0b-1e9d013f5f60","Type":"ContainerStarted","Data":"3a29aacc882579bad3665f81d9049051e4a034eea5edc25711f05f0aedf98e8c"} Dec 02 17:00:06 crc kubenswrapper[4747]: E1202 17:00:06.006894 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:2c837009de6475bc22534827c03df6d8649277b71f1c30de2087b6c52aafb326\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" podUID="d539442d-27c5-4383-9a11-589905951e21" Dec 02 17:00:06 crc kubenswrapper[4747]: E1202 17:00:06.009345 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:8aaaf8bb0a81358ee196af922d534c9b3f6bb47b27f4283087f7e0254638a671\\\"\"" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" podUID="b1871043-c496-421e-8055-817652748d46" Dec 02 17:00:06 crc kubenswrapper[4747]: E1202 17:00:06.009381 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:f4b6baa2b8a661351cfc24fff5aacee5aa4198106618700cfa47ec3a75f88b31\\\"\"" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" podUID="1a8a4d9e-ee5e-4235-bae4-23eb196dac78" Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.037270 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g" podStartSLOduration=10.304519001 podStartE2EDuration="40.037241821s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:28.97621796 +0000 UTC m=+999.503106709" lastFinishedPulling="2025-12-02 16:59:58.70894078 +0000 UTC m=+1029.235829529" observedRunningTime="2025-12-02 17:00:06.031407725 +0000 UTC m=+1036.558296474" watchObservedRunningTime="2025-12-02 17:00:06.037241821 +0000 UTC m=+1036.564130570" Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.108620 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp" 
podStartSLOduration=4.276237042 podStartE2EDuration="40.108592813s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:28.975755447 +0000 UTC m=+999.502644196" lastFinishedPulling="2025-12-02 17:00:04.808111218 +0000 UTC m=+1035.334999967" observedRunningTime="2025-12-02 17:00:06.099763671 +0000 UTC m=+1036.626652420" watchObservedRunningTime="2025-12-02 17:00:06.108592813 +0000 UTC m=+1036.635481562" Dec 02 17:00:06 crc kubenswrapper[4747]: I1202 17:00:06.146999 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7" podStartSLOduration=9.148033817 podStartE2EDuration="40.146968706s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:28.902631564 +0000 UTC m=+999.429520313" lastFinishedPulling="2025-12-02 16:59:59.901566453 +0000 UTC m=+1030.428455202" observedRunningTime="2025-12-02 17:00:06.144549627 +0000 UTC m=+1036.671438376" watchObservedRunningTime="2025-12-02 17:00:06.146968706 +0000 UTC m=+1036.673857465" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.028065 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.043782 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz" event={"ID":"50fe2539-78c5-4fde-9554-30143fdc520f","Type":"ContainerStarted","Data":"7cd99fb252a84905dd320140e3af593851b1cb20841be26ae824fd1085b0c296"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.045199 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.049274 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7" event={"ID":"93ff580d-18b2-4e1f-af0b-f2bd36b1e0db","Type":"ContainerStarted","Data":"882b2edb6ae2c4d6be620c0f36cde62c6b0b00d8d7a78c24fbdc48be6332001e"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.050026 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.053438 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf" event={"ID":"84349f11-8427-4aa8-ae24-750a6fdc5e78","Type":"ContainerStarted","Data":"58ad5053ff9a84b2cae9d38a254af6277d9969aff8cc8fa1952ce9d6df06e64b"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.053896 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.055475 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn" event={"ID":"3f797f20-b787-4429-a862-badf66ed38ea","Type":"ContainerStarted","Data":"8bf2bff954e62eb1c9c67b19a216162af808f4002108799d8000d1a069a2b645"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.055894 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.058472 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh" event={"ID":"dff222d7-fd30-4d3a-839a-6478da00ef65","Type":"ContainerStarted","Data":"0eef7696a26661508f4c9492e0141fe6767cc417f62edc6e7eeab62b0d9aae0b"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.058933 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.062682 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c" event={"ID":"3e91b7b4-9289-4769-83a7-4cd35038aaad","Type":"ContainerStarted","Data":"3970a453d784aef1084fab2a63ea0f80fdd71085bcac0d676732a967114bd21a"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.063154 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.064776 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl" event={"ID":"aac0c240-30e1-410d-bab3-b87965dbd297","Type":"ContainerStarted","Data":"72d0b58b3d5399914dd0280105b188b0500d7c13570a79d29d7bd3703ca52a45"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.065196 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.067252 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5" event={"ID":"1183ccf7-36cd-41ee-96d7-cb7272989af0","Type":"ContainerStarted","Data":"897c61324fd4a64671fb9d46965fc80aee3e4312aeaf408aee4165b3fca9b184"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.067716 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.072806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z" event={"ID":"02a1a6c9-a064-447d-85b3-61d6de6bba1a","Type":"ContainerStarted","Data":"10f7ce9adecffc88ae7efa592a97153fa4c263bed738f0e7a2dbde1541df5e6f"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.073209 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.077229 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm" event={"ID":"11014c8f-0b84-470f-aaf8-0d029800d594","Type":"ContainerStarted","Data":"1cd253e434e41d1487998d667d006affd20a543223d57de831c832e2f8de2035"} Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.077257 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.090213 4747 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz" podStartSLOduration=6.469872931 podStartE2EDuration="41.090189456s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.462358404 +0000 UTC m=+999.989247153" lastFinishedPulling="2025-12-02 17:00:04.082674929 +0000 UTC m=+1034.609563678" observedRunningTime="2025-12-02 17:00:07.080665875 +0000 UTC m=+1037.607554614" watchObservedRunningTime="2025-12-02 17:00:07.090189456 +0000 UTC m=+1037.617078205" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.110242 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.122232 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c" podStartSLOduration=6.516651723 podStartE2EDuration="41.122210458s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.475598601 +0000 UTC m=+1000.002487350" lastFinishedPulling="2025-12-02 17:00:04.081157326 +0000 UTC m=+1034.608046085" observedRunningTime="2025-12-02 17:00:07.115172768 +0000 UTC m=+1037.642061517" watchObservedRunningTime="2025-12-02 17:00:07.122210458 +0000 UTC m=+1037.649099207" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.143465 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn" podStartSLOduration=10.145676598 podStartE2EDuration="41.143438313s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:28.902547002 +0000 UTC m=+999.429435751" lastFinishedPulling="2025-12-02 16:59:59.900308717 +0000 UTC m=+1030.427197466" observedRunningTime="2025-12-02 17:00:07.143069592 +0000 UTC m=+1037.669958341" watchObservedRunningTime="2025-12-02 17:00:07.143438313 +0000 UTC m=+1037.670327062" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.231716 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdtsc\" (UniqueName: \"kubernetes.io/projected/63ec13db-3dc2-442f-80ce-17fd41c3600b-kube-api-access-fdtsc\") pod \"63ec13db-3dc2-442f-80ce-17fd41c3600b\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.234949 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/63ec13db-3dc2-442f-80ce-17fd41c3600b-secret-volume\") pod \"63ec13db-3dc2-442f-80ce-17fd41c3600b\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.235200 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/63ec13db-3dc2-442f-80ce-17fd41c3600b-config-volume\") pod \"63ec13db-3dc2-442f-80ce-17fd41c3600b\" (UID: \"63ec13db-3dc2-442f-80ce-17fd41c3600b\") " Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.252673 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63ec13db-3dc2-442f-80ce-17fd41c3600b-config-volume" (OuterVolumeSpecName: "config-volume") pod "63ec13db-3dc2-442f-80ce-17fd41c3600b" (UID: "63ec13db-3dc2-442f-80ce-17fd41c3600b"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.283926 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7" podStartSLOduration=11.97109887 podStartE2EDuration="41.283887751s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.396394075 +0000 UTC m=+999.923282824" lastFinishedPulling="2025-12-02 16:59:58.709182956 +0000 UTC m=+1029.236071705" observedRunningTime="2025-12-02 17:00:07.275693908 +0000 UTC m=+1037.802582647" watchObservedRunningTime="2025-12-02 17:00:07.283887751 +0000 UTC m=+1037.810776500" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.284509 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63ec13db-3dc2-442f-80ce-17fd41c3600b-kube-api-access-fdtsc" (OuterVolumeSpecName: "kube-api-access-fdtsc") pod "63ec13db-3dc2-442f-80ce-17fd41c3600b" (UID: "63ec13db-3dc2-442f-80ce-17fd41c3600b"). InnerVolumeSpecName "kube-api-access-fdtsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.285237 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63ec13db-3dc2-442f-80ce-17fd41c3600b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "63ec13db-3dc2-442f-80ce-17fd41c3600b" (UID: "63ec13db-3dc2-442f-80ce-17fd41c3600b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.286269 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5" podStartSLOduration=5.661799098 podStartE2EDuration="41.286260809s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.447710127 +0000 UTC m=+999.974598876" lastFinishedPulling="2025-12-02 17:00:05.072171838 +0000 UTC m=+1035.599060587" observedRunningTime="2025-12-02 17:00:07.192235502 +0000 UTC m=+1037.719124271" watchObservedRunningTime="2025-12-02 17:00:07.286260809 +0000 UTC m=+1037.813149558" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.336047 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm" podStartSLOduration=12.024137121 podStartE2EDuration="41.336021456s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.397155737 +0000 UTC m=+999.924044486" lastFinishedPulling="2025-12-02 16:59:58.709040072 +0000 UTC m=+1029.235928821" observedRunningTime="2025-12-02 17:00:07.333140484 +0000 UTC m=+1037.860029233" watchObservedRunningTime="2025-12-02 17:00:07.336021456 +0000 UTC m=+1037.862910205" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.337195 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl" podStartSLOduration=10.565626895 podStartE2EDuration="41.337188959s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:27.937457388 +0000 UTC m=+998.464346137" lastFinishedPulling="2025-12-02 16:59:58.709019452 +0000 UTC m=+1029.235908201" observedRunningTime="2025-12-02 17:00:07.308969926 +0000 UTC m=+1037.835858675" watchObservedRunningTime="2025-12-02 
17:00:07.337188959 +0000 UTC m=+1037.864077718" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.337704 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/63ec13db-3dc2-442f-80ce-17fd41c3600b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.337762 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdtsc\" (UniqueName: \"kubernetes.io/projected/63ec13db-3dc2-442f-80ce-17fd41c3600b-kube-api-access-fdtsc\") on node \"crc\" DevicePath \"\"" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.337777 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/63ec13db-3dc2-442f-80ce-17fd41c3600b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.364149 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z" podStartSLOduration=3.078035918 podStartE2EDuration="41.364127256s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:27.711745341 +0000 UTC m=+998.238634090" lastFinishedPulling="2025-12-02 17:00:05.997836679 +0000 UTC m=+1036.524725428" observedRunningTime="2025-12-02 17:00:07.355162251 +0000 UTC m=+1037.882051010" watchObservedRunningTime="2025-12-02 17:00:07.364127256 +0000 UTC m=+1037.891016005" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.382861 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh" podStartSLOduration=5.760852829 podStartE2EDuration="41.382816149s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.52228118 +0000 UTC m=+1000.049169929" lastFinishedPulling="2025-12-02 17:00:05.1442445 +0000 UTC m=+1035.671133249" observedRunningTime="2025-12-02 17:00:07.371093525 +0000 UTC m=+1037.897982284" watchObservedRunningTime="2025-12-02 17:00:07.382816149 +0000 UTC m=+1037.909704898" Dec 02 17:00:07 crc kubenswrapper[4747]: I1202 17:00:07.411020 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf" podStartSLOduration=11.774687907 podStartE2EDuration="41.410978901s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:28.00388868 +0000 UTC m=+998.530777429" lastFinishedPulling="2025-12-02 16:59:57.640179674 +0000 UTC m=+1028.167068423" observedRunningTime="2025-12-02 17:00:07.402639213 +0000 UTC m=+1037.929527972" watchObservedRunningTime="2025-12-02 17:00:07.410978901 +0000 UTC m=+1037.937867650" Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.086423 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk" Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.086473 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk" event={"ID":"63ec13db-3dc2-442f-80ce-17fd41c3600b","Type":"ContainerDied","Data":"dbbf1907c6d080fc3aa5843b0a66c4d3c09b21497d2b9ac4e40ae488645afc72"} Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.086523 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbbf1907c6d080fc3aa5843b0a66c4d3c09b21497d2b9ac4e40ae488645afc72" Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.089990 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-748967c98-zjpdf" Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.090746 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-64d7c556cd-vkmkn" Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.090943 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-d5fb87cb8-8z6z7" Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.091306 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-79d658b66d-tkgmm" Dec 02 17:00:08 crc kubenswrapper[4747]: I1202 17:00:08.115442 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-698d6fd7d6-jhjgl" Dec 02 17:00:10 crc kubenswrapper[4747]: E1202 17:00:10.763844 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:78d91c3cdd5eda41c2cd6d4a8491844e161dc33f6221be8cb822b2107d7ff46f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" podUID="6ea9539a-d252-4870-bdbf-4bc6d033840c" Dec 02 17:00:10 crc kubenswrapper[4747]: E1202 17:00:10.763953 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:1463c43243c75f56609cbae6bee2f86d411107181775721cb097cbd22fcae1d1\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" podUID="f5edfca9-f892-409d-856c-70e757072464" Dec 02 17:00:12 crc kubenswrapper[4747]: E1202 17:00:12.763390 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:49180c7bd4f0071e43ae7044260a3a97c4aa34fcbcb2d0d4573df449765ed391\\\"\"" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" podUID="e1603c18-0e89-40a6-bb71-549cd8db07c6" Dec 02 17:00:14 crc kubenswrapper[4747]: E1202 17:00:14.766145 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:f076b8d9e85881d9c3cb5272b13db7f5e05d2e9da884c17b677a844112831907\\\"\"" 
pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" podUID="29996641-038c-4bb4-8ed8-4cc853ab4369" Dec 02 17:00:15 crc kubenswrapper[4747]: E1202 17:00:15.762374 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" podUID="75bdef4f-9dfa-4699-9cea-b2804869c8ef" Dec 02 17:00:16 crc kubenswrapper[4747]: I1202 17:00:16.494546 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-5bfbbb859d-5b75z" Dec 02 17:00:16 crc kubenswrapper[4747]: I1202 17:00:16.775069 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7d5d9fd47f-dhznh" Dec 02 17:00:16 crc kubenswrapper[4747]: I1202 17:00:16.789828 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-6c55d8d69b-f7k5g" Dec 02 17:00:16 crc kubenswrapper[4747]: I1202 17:00:16.930498 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-54485f899-qjnsp" Dec 02 17:00:16 crc kubenswrapper[4747]: I1202 17:00:16.946292 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-79cc9d59f5-ftf2c" Dec 02 17:00:17 crc kubenswrapper[4747]: I1202 17:00:17.007739 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5cbc8c7f96-f2qv7" Dec 02 17:00:17 crc kubenswrapper[4747]: I1202 17:00:17.585163 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-867d87977b-kpccv" Dec 02 17:00:17 crc kubenswrapper[4747]: I1202 17:00:17.896464 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6b56b8849f-swpq5" Dec 02 17:00:18 crc kubenswrapper[4747]: I1202 17:00:18.011713 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-77868f484-n5tnz" Dec 02 17:00:18 crc kubenswrapper[4747]: I1202 17:00:18.763264 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:00:20 crc kubenswrapper[4747]: I1202 17:00:20.213397 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" event={"ID":"d539442d-27c5-4383-9a11-589905951e21","Type":"ContainerStarted","Data":"987edd2d299bedf1fb3bdbd13a44a24e9dae6c3f80c0563bfc21ea8f07df0924"} Dec 02 17:00:20 crc kubenswrapper[4747]: I1202 17:00:20.215484 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" Dec 02 17:00:20 crc kubenswrapper[4747]: I1202 17:00:20.229348 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" 
event={"ID":"b1871043-c496-421e-8055-817652748d46","Type":"ContainerStarted","Data":"2c37c8fd92279a7142a8f680255f72d3484c0b26f56f9f96ed1bdc8462df31c9"} Dec 02 17:00:20 crc kubenswrapper[4747]: I1202 17:00:20.230359 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" Dec 02 17:00:20 crc kubenswrapper[4747]: I1202 17:00:20.247061 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" podStartSLOduration=3.738543743 podStartE2EDuration="54.247031036s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.475036405 +0000 UTC m=+1000.001925154" lastFinishedPulling="2025-12-02 17:00:19.983523698 +0000 UTC m=+1050.510412447" observedRunningTime="2025-12-02 17:00:20.237609748 +0000 UTC m=+1050.764498507" watchObservedRunningTime="2025-12-02 17:00:20.247031036 +0000 UTC m=+1050.773919785" Dec 02 17:00:22 crc kubenswrapper[4747]: I1202 17:00:22.836846 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" podStartSLOduration=6.414082546 podStartE2EDuration="56.836823401s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:28.925957138 +0000 UTC m=+999.452845887" lastFinishedPulling="2025-12-02 17:00:19.348697993 +0000 UTC m=+1049.875586742" observedRunningTime="2025-12-02 17:00:20.283943187 +0000 UTC m=+1050.810831936" watchObservedRunningTime="2025-12-02 17:00:22.836823401 +0000 UTC m=+1053.363712150" Dec 02 17:00:23 crc kubenswrapper[4747]: I1202 17:00:23.256837 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" event={"ID":"1a8a4d9e-ee5e-4235-bae4-23eb196dac78","Type":"ContainerStarted","Data":"208e25f2ea64298767de091763ba1c706fcec4a719f6e12a9a1e8c8edafc0ba6"} Dec 02 17:00:23 crc kubenswrapper[4747]: I1202 17:00:23.258755 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" Dec 02 17:00:23 crc kubenswrapper[4747]: I1202 17:00:23.282435 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" podStartSLOduration=3.320943647 podStartE2EDuration="57.282405619s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:28.902765638 +0000 UTC m=+999.429654377" lastFinishedPulling="2025-12-02 17:00:22.86422759 +0000 UTC m=+1053.391116349" observedRunningTime="2025-12-02 17:00:23.277566131 +0000 UTC m=+1053.804454880" watchObservedRunningTime="2025-12-02 17:00:23.282405619 +0000 UTC m=+1053.809294368" Dec 02 17:00:24 crc kubenswrapper[4747]: I1202 17:00:24.274358 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" event={"ID":"6ea9539a-d252-4870-bdbf-4bc6d033840c","Type":"ContainerStarted","Data":"6e0f1d53f5a6041a4f0d9b72f58c90548a97ed6b9fc27a6c4768919362eb4b74"} Dec 02 17:00:24 crc kubenswrapper[4747]: I1202 17:00:24.274735 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" Dec 02 17:00:24 crc kubenswrapper[4747]: I1202 17:00:24.294639 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" podStartSLOduration=4.410718323 podStartE2EDuration="58.294621932s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.566952703 +0000 UTC m=+1000.093841452" lastFinishedPulling="2025-12-02 17:00:23.450856312 +0000 UTC m=+1053.977745061" observedRunningTime="2025-12-02 17:00:24.292395588 +0000 UTC m=+1054.819284337" watchObservedRunningTime="2025-12-02 17:00:24.294621932 +0000 UTC m=+1054.821510681" Dec 02 17:00:26 crc kubenswrapper[4747]: I1202 17:00:26.290113 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" event={"ID":"f5edfca9-f892-409d-856c-70e757072464","Type":"ContainerStarted","Data":"41c51fccf2d3499769903e151b8ac91cececfd5e4ef296dbad8900cef25b9176"} Dec 02 17:00:26 crc kubenswrapper[4747]: I1202 17:00:26.291018 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" Dec 02 17:00:26 crc kubenswrapper[4747]: I1202 17:00:26.318033 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" podStartSLOduration=4.362947473 podStartE2EDuration="1m0.318003958s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.565862541 +0000 UTC m=+1000.092751280" lastFinishedPulling="2025-12-02 17:00:25.520919016 +0000 UTC m=+1056.047807765" observedRunningTime="2025-12-02 17:00:26.311536564 +0000 UTC m=+1056.838425313" watchObservedRunningTime="2025-12-02 17:00:26.318003958 +0000 UTC m=+1056.844892707" Dec 02 17:00:26 crc kubenswrapper[4747]: I1202 17:00:26.877303 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-6788cc6d75-nsvrj" Dec 02 17:00:27 crc kubenswrapper[4747]: I1202 17:00:27.301516 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" event={"ID":"29996641-038c-4bb4-8ed8-4cc853ab4369","Type":"ContainerStarted","Data":"eed846a9680255dbdbd5747f38f27076446a5536cd548cdd92f65d20eddbd8a4"} Dec 02 17:00:27 crc kubenswrapper[4747]: I1202 17:00:27.301846 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" Dec 02 17:00:27 crc kubenswrapper[4747]: I1202 17:00:27.327092 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" podStartSLOduration=4.339395961 podStartE2EDuration="1m1.327063451s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.485130782 +0000 UTC m=+1000.012019531" lastFinishedPulling="2025-12-02 17:00:26.472798272 +0000 UTC m=+1056.999687021" observedRunningTime="2025-12-02 17:00:27.323440418 +0000 UTC m=+1057.850329187" watchObservedRunningTime="2025-12-02 17:00:27.327063451 +0000 UTC m=+1057.853952200" Dec 02 17:00:27 crc kubenswrapper[4747]: I1202 17:00:27.450022 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-5b67cfc8fb-j9r8v" Dec 02 17:00:29 crc kubenswrapper[4747]: I1202 17:00:29.322991 4747 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" event={"ID":"e1603c18-0e89-40a6-bb71-549cd8db07c6","Type":"ContainerStarted","Data":"2654e424b50041328f40d90f80959f4ec760f1c674ae8b99655a114764fddb99"} Dec 02 17:00:29 crc kubenswrapper[4747]: I1202 17:00:29.324084 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" Dec 02 17:00:29 crc kubenswrapper[4747]: I1202 17:00:29.325523 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" event={"ID":"75bdef4f-9dfa-4699-9cea-b2804869c8ef","Type":"ContainerStarted","Data":"688c56c0bf5656c6ec4ab953f034cac0f83684ca349aa5dcc17c5e8cf6afac35"} Dec 02 17:00:29 crc kubenswrapper[4747]: I1202 17:00:29.354535 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" podStartSLOduration=4.576259301 podStartE2EDuration="1m3.354506993s" podCreationTimestamp="2025-12-02 16:59:26 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.522671971 +0000 UTC m=+1000.049560720" lastFinishedPulling="2025-12-02 17:00:28.300919663 +0000 UTC m=+1058.827808412" observedRunningTime="2025-12-02 17:00:29.348829012 +0000 UTC m=+1059.875717771" watchObservedRunningTime="2025-12-02 17:00:29.354506993 +0000 UTC m=+1059.881395742" Dec 02 17:00:29 crc kubenswrapper[4747]: I1202 17:00:29.383811 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg" podStartSLOduration=3.604703501 podStartE2EDuration="1m2.383781177s" podCreationTimestamp="2025-12-02 16:59:27 +0000 UTC" firstStartedPulling="2025-12-02 16:59:29.523155755 +0000 UTC m=+1000.050044504" lastFinishedPulling="2025-12-02 17:00:28.302233431 +0000 UTC m=+1058.829122180" observedRunningTime="2025-12-02 17:00:29.368321967 +0000 UTC m=+1059.895210726" watchObservedRunningTime="2025-12-02 17:00:29.383781177 +0000 UTC m=+1059.910669946" Dec 02 17:00:31 crc kubenswrapper[4747]: I1202 17:00:31.795938 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:00:31 crc kubenswrapper[4747]: I1202 17:00:31.796353 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:00:31 crc kubenswrapper[4747]: I1202 17:00:31.796423 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:00:31 crc kubenswrapper[4747]: I1202 17:00:31.797218 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dd912523cc5101dd05f8356cae810078de73ddb22c7e5af9901013b7a8b2bc0c"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:00:31 crc kubenswrapper[4747]: 
I1202 17:00:31.797271 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://dd912523cc5101dd05f8356cae810078de73ddb22c7e5af9901013b7a8b2bc0c" gracePeriod=600 Dec 02 17:00:33 crc kubenswrapper[4747]: I1202 17:00:33.370260 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="dd912523cc5101dd05f8356cae810078de73ddb22c7e5af9901013b7a8b2bc0c" exitCode=0 Dec 02 17:00:33 crc kubenswrapper[4747]: I1202 17:00:33.370346 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"dd912523cc5101dd05f8356cae810078de73ddb22c7e5af9901013b7a8b2bc0c"} Dec 02 17:00:33 crc kubenswrapper[4747]: I1202 17:00:33.370841 4747 scope.go:117] "RemoveContainer" containerID="5bac94c471a07d1efa3cacb30b33cf6d0f493cee57eaf241d36618d258608c30" Dec 02 17:00:35 crc kubenswrapper[4747]: I1202 17:00:35.396034 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"3bcd3880ed49ebcd50724927f63a47903d028c91da930fb1b60778e7033a6140"} Dec 02 17:00:36 crc kubenswrapper[4747]: I1202 17:00:36.886751 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-85fbd69fcd-ns9p8" Dec 02 17:00:37 crc kubenswrapper[4747]: I1202 17:00:37.170512 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-58879495c-wg54q" Dec 02 17:00:37 crc kubenswrapper[4747]: I1202 17:00:37.593368 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-8f6687c44-lkxqf" Dec 02 17:00:37 crc kubenswrapper[4747]: I1202 17:00:37.934106 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-695797c565-mwsvx" Dec 02 17:00:38 crc kubenswrapper[4747]: I1202 17:00:38.070352 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-bb86466d8-zzcts" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.066570 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sg8gv"] Dec 02 17:01:00 crc kubenswrapper[4747]: E1202 17:01:00.068154 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63ec13db-3dc2-442f-80ce-17fd41c3600b" containerName="collect-profiles" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.068182 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="63ec13db-3dc2-442f-80ce-17fd41c3600b" containerName="collect-profiles" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.068355 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="63ec13db-3dc2-442f-80ce-17fd41c3600b" containerName="collect-profiles" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.069363 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.079142 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.079219 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.079381 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-v6nr2" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.079439 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.088077 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sg8gv"] Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.160764 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8q57d"] Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.163616 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.170338 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.179382 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8q57d"] Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.210209 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qz2v\" (UniqueName: \"kubernetes.io/projected/467af242-db5d-49fd-a9cc-06d06d39c0b3-kube-api-access-7qz2v\") pod \"dnsmasq-dns-675f4bcbfc-sg8gv\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.210312 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/467af242-db5d-49fd-a9cc-06d06d39c0b3-config\") pod \"dnsmasq-dns-675f4bcbfc-sg8gv\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.311563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qz2v\" (UniqueName: \"kubernetes.io/projected/467af242-db5d-49fd-a9cc-06d06d39c0b3-kube-api-access-7qz2v\") pod \"dnsmasq-dns-675f4bcbfc-sg8gv\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.311668 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/467af242-db5d-49fd-a9cc-06d06d39c0b3-config\") pod \"dnsmasq-dns-675f4bcbfc-sg8gv\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.311714 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-config\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc 
kubenswrapper[4747]: I1202 17:01:00.311779 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmprk\" (UniqueName: \"kubernetes.io/projected/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-kube-api-access-xmprk\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.311921 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.312854 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/467af242-db5d-49fd-a9cc-06d06d39c0b3-config\") pod \"dnsmasq-dns-675f4bcbfc-sg8gv\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.353506 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qz2v\" (UniqueName: \"kubernetes.io/projected/467af242-db5d-49fd-a9cc-06d06d39c0b3-kube-api-access-7qz2v\") pod \"dnsmasq-dns-675f4bcbfc-sg8gv\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.409471 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.413421 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-config\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.413558 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmprk\" (UniqueName: \"kubernetes.io/projected/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-kube-api-access-xmprk\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.413616 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.414816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.416298 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-config\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: 
\"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.438616 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmprk\" (UniqueName: \"kubernetes.io/projected/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-kube-api-access-xmprk\") pod \"dnsmasq-dns-78dd6ddcc-8q57d\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.482764 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:00 crc kubenswrapper[4747]: I1202 17:01:00.977471 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sg8gv"] Dec 02 17:01:01 crc kubenswrapper[4747]: I1202 17:01:01.026080 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8q57d"] Dec 02 17:01:01 crc kubenswrapper[4747]: W1202 17:01:01.028969 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod686f25cf_1526_4181_b3ad_eeb8c4e16ad6.slice/crio-8e127b1a6b116d8f1bbb55e9bf7706b3a0dc24c420010ef5b8fcb9e2482a4bf3 WatchSource:0}: Error finding container 8e127b1a6b116d8f1bbb55e9bf7706b3a0dc24c420010ef5b8fcb9e2482a4bf3: Status 404 returned error can't find the container with id 8e127b1a6b116d8f1bbb55e9bf7706b3a0dc24c420010ef5b8fcb9e2482a4bf3 Dec 02 17:01:01 crc kubenswrapper[4747]: I1202 17:01:01.644286 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" event={"ID":"686f25cf-1526-4181-b3ad-eeb8c4e16ad6","Type":"ContainerStarted","Data":"8e127b1a6b116d8f1bbb55e9bf7706b3a0dc24c420010ef5b8fcb9e2482a4bf3"} Dec 02 17:01:01 crc kubenswrapper[4747]: I1202 17:01:01.645400 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" event={"ID":"467af242-db5d-49fd-a9cc-06d06d39c0b3","Type":"ContainerStarted","Data":"1e971ce724aa1f4131d9898fc155422db0cae0cb3b18842f7b120142ecc8124f"} Dec 02 17:01:02 crc kubenswrapper[4747]: I1202 17:01:02.993043 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sg8gv"] Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.029780 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q898h"] Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.031676 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.073544 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q898h"] Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.087531 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.087639 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv4tw\" (UniqueName: \"kubernetes.io/projected/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-kube-api-access-dv4tw\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.087676 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-config\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.188530 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv4tw\" (UniqueName: \"kubernetes.io/projected/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-kube-api-access-dv4tw\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.188587 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-config\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.188656 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.189827 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.191170 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-config\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.214153 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv4tw\" (UniqueName: 
\"kubernetes.io/projected/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-kube-api-access-dv4tw\") pod \"dnsmasq-dns-666b6646f7-q898h\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") " pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.378441 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.409730 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8q57d"] Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.454759 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lx4fx"] Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.457077 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.481841 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lx4fx"] Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.596448 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-config\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.596512 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.596597 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2k2g\" (UniqueName: \"kubernetes.io/projected/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-kube-api-access-q2k2g\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.698833 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2k2g\" (UniqueName: \"kubernetes.io/projected/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-kube-api-access-q2k2g\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.698972 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-config\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.699010 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.700251 4747 operation_generator.go:637] "MountVolume.SetUp 
Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.700251 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-config\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx"
Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.700385 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx"
Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.771007 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2k2g\" (UniqueName: \"kubernetes.io/projected/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-kube-api-access-q2k2g\") pod \"dnsmasq-dns-57d769cc4f-lx4fx\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") " pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx"
Dec 02 17:01:03 crc kubenswrapper[4747]: I1202 17:01:03.899974 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.251646 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.253817 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.261833 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.261854 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.262205 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.262574 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.262887 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4rnd4"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.263086 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.263825 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.291333 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.474830 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.474943 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475030 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1e1d5029-90ff-4315-8ba4-961286afbb54-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475144 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475227 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475277 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475668 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-config-data\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475696 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475722 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1e1d5029-90ff-4315-8ba4-961286afbb54-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475771 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.475858 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmsrr\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-kube-api-access-qmsrr\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.549710 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lx4fx"]
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.577645 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.577774 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.577813 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1e1d5029-90ff-4315-8ba4-961286afbb54-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.577974 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.578537 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.578579 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.578637 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-config-data\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.578664 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.578692 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1e1d5029-90ff-4315-8ba4-961286afbb54-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.578751 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.578803 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmsrr\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-kube-api-access-qmsrr\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.579874 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-server-conf\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.580290 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.580669 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.581267 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-config-data\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.585249 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.585255 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.592283 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.593344 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1e1d5029-90ff-4315-8ba4-961286afbb54-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.596657 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.602351 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1e1d5029-90ff-4315-8ba4-961286afbb54-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.604685 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmsrr\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-kube-api-access-qmsrr\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.648521 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " pod="openstack/rabbitmq-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.683008 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q898h"]
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.691731 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
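[editor's note] local-storage03-crc above goes through two phases: MountVolume.MountDevice succeeds once at the device mount path "/mnt/openstack/pv03" (operation_generator.go:580), and only afterwards does MountVolume.SetUp succeed for the consuming pod (operation_generator.go:637), which bind-mounts the device path into the pod's volumes directory. The sketch below only computes the two path layers; the per-pod target layout under /var/lib/kubelet/pods is an assumption for illustration, and the bind mount itself is elided.

package main

import (
	"fmt"
	"path/filepath"
)

// kubeletPodsDir is the conventional kubelet pods root; assumed here, not
// taken from this log section.
const kubeletPodsDir = "/var/lib/kubelet/pods"

// deviceMountPath is the phase-1 (MountDevice) location, shared by all pods
// that use the volume; "/mnt/openstack/pv03" appears in the entries above.
func deviceMountPath(pv string) string {
	return filepath.Join("/mnt/openstack", pv)
}

// podVolumePath is the phase-2 (SetUp) per-pod target; the
// "kubernetes.io~local-volume" segment is an assumed layout.
func podVolumePath(podUID, volName string) string {
	return filepath.Join(kubeletPodsDir, podUID, "volumes", "kubernetes.io~local-volume", volName)
}

func main() {
	dev := deviceMountPath("pv03")
	tgt := podVolumePath("1e1d5029-90ff-4315-8ba4-961286afbb54", "local-storage03-crc")
	fmt.Printf("MountDevice path: %s\nSetUp bind-mounts %s onto %s\n", dev, dev, tgt)
}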
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: W1202 17:01:04.696465 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fb917c8_4122_4bd3_8cf8_c0c06e782ea5.slice/crio-94a0b5c57e4439915acdc2da4a16880b575732b4666f8db5c2a521f2eaecef3c WatchSource:0}: Error finding container 94a0b5c57e4439915acdc2da4a16880b575732b4666f8db5c2a521f2eaecef3c: Status 404 returned error can't find the container with id 94a0b5c57e4439915acdc2da4a16880b575732b4666f8db5c2a521f2eaecef3c Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.697360 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.698615 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-4gzcv" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.698779 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.698895 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.698888 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.698882 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.704139 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.714214 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.733178 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" event={"ID":"ee8ab716-dbff-4db7-b44f-de98c75f0dcf","Type":"ContainerStarted","Data":"72d90b8c87965055f80829c2dc08a926399901f35f062118b988e7ec4572fff3"} Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786078 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786195 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786244 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/84019d16-aa94-40fc-9615-45c7d3dcb7b3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 
17:01:04.786290 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786346 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhmbq\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-kube-api-access-mhmbq\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786396 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786430 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786505 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786560 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786590 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/84019d16-aa94-40fc-9615-45c7d3dcb7b3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.786617 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.889809 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.889861 
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.889861 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/84019d16-aa94-40fc-9615-45c7d3dcb7b3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.889887 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.890035 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.890231 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.890267 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/84019d16-aa94-40fc-9615-45c7d3dcb7b3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.890340 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.890384 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.890402 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhmbq\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-kube-api-access-mhmbq\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.891632 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.891797 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.891888 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.892173 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.892881 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.894950 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.895214 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.896231 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/84019d16-aa94-40fc-9615-45c7d3dcb7b3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.898042 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.903945 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/84019d16-aa94-40fc-9615-45c7d3dcb7b3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.905927 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.906559 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.918801 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhmbq\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-kube-api-access-mhmbq\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.925751 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:04 crc kubenswrapper[4747]: I1202 17:01:04.946162 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 02 17:01:05 crc kubenswrapper[4747]: I1202 17:01:05.055788 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 02 17:01:05 crc kubenswrapper[4747]: I1202 17:01:05.535540 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 02 17:01:05 crc kubenswrapper[4747]: W1202 17:01:05.598614 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e1d5029_90ff_4315_8ba4_961286afbb54.slice/crio-fce135190c0b0ac9cc8e3dde031baaa97ae0943884a5d83f2f745f4bfa8ccbbb WatchSource:0}: Error finding container fce135190c0b0ac9cc8e3dde031baaa97ae0943884a5d83f2f745f4bfa8ccbbb: Status 404 returned error can't find the container with id fce135190c0b0ac9cc8e3dde031baaa97ae0943884a5d83f2f745f4bfa8ccbbb
Dec 02 17:01:05 crc kubenswrapper[4747]: I1202 17:01:05.781947 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1e1d5029-90ff-4315-8ba4-961286afbb54","Type":"ContainerStarted","Data":"fce135190c0b0ac9cc8e3dde031baaa97ae0943884a5d83f2f745f4bfa8ccbbb"}
Dec 02 17:01:05 crc kubenswrapper[4747]: I1202 17:01:05.782004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q898h" event={"ID":"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5","Type":"ContainerStarted","Data":"94a0b5c57e4439915acdc2da4a16880b575732b4666f8db5c2a521f2eaecef3c"}
Dec 02 17:01:05 crc kubenswrapper[4747]: I1202 17:01:05.850026 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 02 17:01:05 crc kubenswrapper[4747]: W1202 17:01:05.894813 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84019d16_aa94_40fc_9615_45c7d3dcb7b3.slice/crio-4967b8694de652e0401a6cd52fd2c37fa5856662b42954b5bd5cc00474d9d60e WatchSource:0}: Error finding container 4967b8694de652e0401a6cd52fd2c37fa5856662b42954b5bd5cc00474d9d60e: Status 404 returned error can't find the container with id 4967b8694de652e0401a6cd52fd2c37fa5856662b42954b5bd5cc00474d9d60e
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.152074 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.154539 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.158657 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.158949 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.159649 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.159875 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-q58w6"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.160989 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.165505 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.168407 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515632 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-secrets\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515711 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515788 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1e65482-6e30-4e82-8c20-5fd991675dba-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515856 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515884 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-config-data-default\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515933 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p57n\" (UniqueName: \"kubernetes.io/projected/c1e65482-6e30-4e82-8c20-5fd991675dba-kube-api-access-5p57n\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515962 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.515986 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-kolla-config\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.618919 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1e65482-6e30-4e82-8c20-5fd991675dba-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619031 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619054 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619083 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-config-data-default\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619132 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p57n\" (UniqueName: \"kubernetes.io/projected/c1e65482-6e30-4e82-8c20-5fd991675dba-kube-api-access-5p57n\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0"
(UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619202 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-kolla-config\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-secrets\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619302 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619817 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.619922 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c1e65482-6e30-4e82-8c20-5fd991675dba-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.621291 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-kolla-config\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.621690 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-config-data-default\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.624232 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1e65482-6e30-4e82-8c20-5fd991675dba-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.635986 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-secrets\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.640239 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.644730 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p57n\" (UniqueName: \"kubernetes.io/projected/c1e65482-6e30-4e82-8c20-5fd991675dba-kube-api-access-5p57n\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.656195 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e65482-6e30-4e82-8c20-5fd991675dba-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.679012 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"c1e65482-6e30-4e82-8c20-5fd991675dba\") " pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.736565 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 02 17:01:06 crc kubenswrapper[4747]: I1202 17:01:06.823999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"84019d16-aa94-40fc-9615-45c7d3dcb7b3","Type":"ContainerStarted","Data":"4967b8694de652e0401a6cd52fd2c37fa5856662b42954b5bd5cc00474d9d60e"} Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.486114 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 02 17:01:07 crc kubenswrapper[4747]: W1202 17:01:07.549213 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1e65482_6e30_4e82_8c20_5fd991675dba.slice/crio-43ed3660c74dcbf6ffa1fd6e82ab187903e468433723410ec302bb3bf416aa85 WatchSource:0}: Error finding container 43ed3660c74dcbf6ffa1fd6e82ab187903e468433723410ec302bb3bf416aa85: Status 404 returned error can't find the container with id 43ed3660c74dcbf6ffa1fd6e82ab187903e468433723410ec302bb3bf416aa85 Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.748711 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.756916 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.766472 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.766803 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.766995 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-cd8m8" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.767021 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.819553 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.819600 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.823480 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.831204 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.831591 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-b9x5c" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.831762 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.860770 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.860862 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.860936 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.860962 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.861043 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.861102 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5wvm\" (UniqueName: \"kubernetes.io/projected/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-kube-api-access-t5wvm\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.861201 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.861280 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.861358 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.877157 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.893103 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1e65482-6e30-4e82-8c20-5fd991675dba","Type":"ContainerStarted","Data":"43ed3660c74dcbf6ffa1fd6e82ab187903e468433723410ec302bb3bf416aa85"} Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.962925 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963001 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5wvm\" (UniqueName: \"kubernetes.io/projected/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-kube-api-access-t5wvm\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963090 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/11fbad36-d913-4a80-b1db-6f9707f8c370-kolla-config\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963129 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963204 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/11fbad36-d913-4a80-b1db-6f9707f8c370-memcached-tls-certs\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963226 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn7mc\" (UniqueName: \"kubernetes.io/projected/11fbad36-d913-4a80-b1db-6f9707f8c370-kube-api-access-pn7mc\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963293 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963369 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11fbad36-d913-4a80-b1db-6f9707f8c370-combined-ca-bundle\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963424 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963505 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.963621 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.964076 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.964373 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.965098 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.965554 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.965649 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11fbad36-d913-4a80-b1db-6f9707f8c370-config-data\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.965823 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.966228 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.971834 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.987983 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.993677 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-secrets\") pod \"openstack-cell1-galera-0\" (UID: 
\"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:07 crc kubenswrapper[4747]: I1202 17:01:07.997832 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5wvm\" (UniqueName: \"kubernetes.io/projected/de9ec28c-1521-4af6-8473-fe8bf1cabf5d-kube-api-access-t5wvm\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.024095 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"de9ec28c-1521-4af6-8473-fe8bf1cabf5d\") " pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.069814 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11fbad36-d913-4a80-b1db-6f9707f8c370-config-data\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.069948 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/11fbad36-d913-4a80-b1db-6f9707f8c370-kolla-config\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.070021 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/11fbad36-d913-4a80-b1db-6f9707f8c370-memcached-tls-certs\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.070049 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn7mc\" (UniqueName: \"kubernetes.io/projected/11fbad36-d913-4a80-b1db-6f9707f8c370-kube-api-access-pn7mc\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.071088 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11fbad36-d913-4a80-b1db-6f9707f8c370-combined-ca-bundle\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.074377 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/11fbad36-d913-4a80-b1db-6f9707f8c370-kolla-config\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.075777 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11fbad36-d913-4a80-b1db-6f9707f8c370-config-data\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.083545 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11fbad36-d913-4a80-b1db-6f9707f8c370-combined-ca-bundle\") pod 
\"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.099642 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/11fbad36-d913-4a80-b1db-6f9707f8c370-memcached-tls-certs\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.103782 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn7mc\" (UniqueName: \"kubernetes.io/projected/11fbad36-d913-4a80-b1db-6f9707f8c370-kube-api-access-pn7mc\") pod \"memcached-0\" (UID: \"11fbad36-d913-4a80-b1db-6f9707f8c370\") " pod="openstack/memcached-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.119262 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 17:01:08 crc kubenswrapper[4747]: I1202 17:01:08.172589 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 02 17:01:09 crc kubenswrapper[4747]: I1202 17:01:09.529221 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 17:01:09 crc kubenswrapper[4747]: I1202 17:01:09.603899 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 02 17:01:09 crc kubenswrapper[4747]: I1202 17:01:09.983317 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:01:09 crc kubenswrapper[4747]: I1202 17:01:09.984742 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:01:09 crc kubenswrapper[4747]: I1202 17:01:09.984878 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 17:01:09 crc kubenswrapper[4747]: I1202 17:01:09.988349 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-5fxpc" Dec 02 17:01:10 crc kubenswrapper[4747]: I1202 17:01:10.001154 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"11fbad36-d913-4a80-b1db-6f9707f8c370","Type":"ContainerStarted","Data":"bb1cd57ede406151c0e3e861ac711c9515e2ecd73162eaf81f06aefa1aab1ad1"} Dec 02 17:01:10 crc kubenswrapper[4747]: I1202 17:01:10.005095 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"de9ec28c-1521-4af6-8473-fe8bf1cabf5d","Type":"ContainerStarted","Data":"31e2aac24d079612895bccf111b3ef904ca6bd17dd40bab53ef6c4cb2ed0e979"} Dec 02 17:01:10 crc kubenswrapper[4747]: I1202 17:01:10.086433 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bd6s\" (UniqueName: \"kubernetes.io/projected/fc2871c5-3506-4ae4-9881-8c184d57c2e1-kube-api-access-6bd6s\") pod \"kube-state-metrics-0\" (UID: \"fc2871c5-3506-4ae4-9881-8c184d57c2e1\") " pod="openstack/kube-state-metrics-0" Dec 02 17:01:10 crc kubenswrapper[4747]: I1202 17:01:10.210342 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bd6s\" (UniqueName: \"kubernetes.io/projected/fc2871c5-3506-4ae4-9881-8c184d57c2e1-kube-api-access-6bd6s\") pod \"kube-state-metrics-0\" (UID: \"fc2871c5-3506-4ae4-9881-8c184d57c2e1\") " pod="openstack/kube-state-metrics-0" Dec 02 17:01:10 crc kubenswrapper[4747]: I1202 17:01:10.341210 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bd6s\" (UniqueName: \"kubernetes.io/projected/fc2871c5-3506-4ae4-9881-8c184d57c2e1-kube-api-access-6bd6s\") pod \"kube-state-metrics-0\" (UID: \"fc2871c5-3506-4ae4-9881-8c184d57c2e1\") " pod="openstack/kube-state-metrics-0" Dec 02 17:01:10 crc kubenswrapper[4747]: I1202 17:01:10.348886 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 17:01:11 crc kubenswrapper[4747]: I1202 17:01:11.970269 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.695614 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lqt7r"] Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.697481 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.704732 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-ffhqk" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.705967 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.706438 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.711769 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lqt7r"] Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.719726 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-hbnrp"] Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.722208 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.727425 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hbnrp"] Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.786881 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-scripts\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.788435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnb8h\" (UniqueName: \"kubernetes.io/projected/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-kube-api-access-fnb8h\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.788646 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-log\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.788780 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-scripts\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.789007 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-run\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.789132 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-ovn-controller-tls-certs\") pod \"ovn-controller-lqt7r\" (UID: 
\"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.789325 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwl74\" (UniqueName: \"kubernetes.io/projected/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-kube-api-access-gwl74\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.789538 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-combined-ca-bundle\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.789761 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-log-ovn\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.790022 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-run\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.790195 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-run-ovn\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.790367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-lib\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.790487 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-etc-ovs\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892041 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-run\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-run-ovn\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc 
kubenswrapper[4747]: I1202 17:01:13.892135 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-lib\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892161 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-etc-ovs\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892231 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-scripts\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892259 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnb8h\" (UniqueName: \"kubernetes.io/projected/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-kube-api-access-fnb8h\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892281 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-log\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892298 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-scripts\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892333 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-run\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892356 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-ovn-controller-tls-certs\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892384 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwl74\" (UniqueName: \"kubernetes.io/projected/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-kube-api-access-gwl74\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892427 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-combined-ca-bundle\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892465 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-log-ovn\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892878 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-run\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892931 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-run\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.892974 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-lib\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.893098 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-run-ovn\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.893142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-var-log-ovn\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.893415 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-var-log\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.896196 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-etc-ovs\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.897198 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-scripts\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.901048 4747 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-scripts\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.903922 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-ovn-controller-tls-certs\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.904521 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-combined-ca-bundle\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.919791 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwl74\" (UniqueName: \"kubernetes.io/projected/ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020-kube-api-access-gwl74\") pod \"ovn-controller-lqt7r\" (UID: \"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020\") " pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:13 crc kubenswrapper[4747]: I1202 17:01:13.925564 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnb8h\" (UniqueName: \"kubernetes.io/projected/46dfbb1c-1b65-4f5d-9087-94743cb4c00e-kube-api-access-fnb8h\") pod \"ovn-controller-ovs-hbnrp\" (UID: \"46dfbb1c-1b65-4f5d-9087-94743cb4c00e\") " pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.023446 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.058871 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.157330 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.158956 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.164865 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.165268 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.165488 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.165690 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.165996 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-n6tc8" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.177690 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.300177 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.300261 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8b10db-030f-4419-9fa8-c500ae646151-config\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.300303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5a8b10db-030f-4419-9fa8-c500ae646151-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.300967 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.301079 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.301128 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bbm8\" (UniqueName: \"kubernetes.io/projected/5a8b10db-030f-4419-9fa8-c500ae646151-kube-api-access-5bbm8\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.301310 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.301425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a8b10db-030f-4419-9fa8-c500ae646151-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.403524 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a8b10db-030f-4419-9fa8-c500ae646151-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.404099 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.404157 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8b10db-030f-4419-9fa8-c500ae646151-config\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.404198 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5a8b10db-030f-4419-9fa8-c500ae646151-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.404233 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.404253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.404277 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bbm8\" (UniqueName: \"kubernetes.io/projected/5a8b10db-030f-4419-9fa8-c500ae646151-kube-api-access-5bbm8\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.404314 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 
17:01:14.404733 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.413530 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5a8b10db-030f-4419-9fa8-c500ae646151-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.414547 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5a8b10db-030f-4419-9fa8-c500ae646151-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.450602 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.451489 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a8b10db-030f-4419-9fa8-c500ae646151-config\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.452986 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.460227 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a8b10db-030f-4419-9fa8-c500ae646151-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.461466 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.480159 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bbm8\" (UniqueName: \"kubernetes.io/projected/5a8b10db-030f-4419-9fa8-c500ae646151-kube-api-access-5bbm8\") pod \"ovsdbserver-sb-0\" (UID: \"5a8b10db-030f-4419-9fa8-c500ae646151\") " pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:14 crc kubenswrapper[4747]: I1202 17:01:14.498143 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.132515 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fc2871c5-3506-4ae4-9881-8c184d57c2e1","Type":"ContainerStarted","Data":"178f8f0688b5065ec91692235aef484a5b61d7b0be13d0ee4f19cca388410455"} Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.309081 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.313932 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.320233 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-489xm" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.320633 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.320823 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.320898 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.337043 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.368865 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr67c\" (UniqueName: \"kubernetes.io/projected/eb9eddf0-4be4-48f8-bcfe-083310ec9333-kube-api-access-jr67c\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.370084 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.370213 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.370513 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb9eddf0-4be4-48f8-bcfe-083310ec9333-config\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.370886 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.371020 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb9eddf0-4be4-48f8-bcfe-083310ec9333-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.371073 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.371307 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/eb9eddf0-4be4-48f8-bcfe-083310ec9333-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.473829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb9eddf0-4be4-48f8-bcfe-083310ec9333-config\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.473969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.474025 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb9eddf0-4be4-48f8-bcfe-083310ec9333-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.474045 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.474094 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/eb9eddf0-4be4-48f8-bcfe-083310ec9333-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.474133 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr67c\" (UniqueName: \"kubernetes.io/projected/eb9eddf0-4be4-48f8-bcfe-083310ec9333-kube-api-access-jr67c\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.474164 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: 
\"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.474180 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.483396 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb9eddf0-4be4-48f8-bcfe-083310ec9333-config\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.487241 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.488062 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eb9eddf0-4be4-48f8-bcfe-083310ec9333-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.490826 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.491372 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/eb9eddf0-4be4-48f8-bcfe-083310ec9333-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.492258 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.521520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.525131 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9eddf0-4be4-48f8-bcfe-083310ec9333-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.541305 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr67c\" (UniqueName: 
\"kubernetes.io/projected/eb9eddf0-4be4-48f8-bcfe-083310ec9333-kube-api-access-jr67c\") pod \"ovsdbserver-nb-0\" (UID: \"eb9eddf0-4be4-48f8-bcfe-083310ec9333\") " pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:17 crc kubenswrapper[4747]: I1202 17:01:17.653120 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 17:01:27 crc kubenswrapper[4747]: E1202 17:01:27.338351 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 02 17:01:27 crc kubenswrapper[4747]: E1202 17:01:27.339197 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mhmbq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(84019d16-aa94-40fc-9615-45c7d3dcb7b3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 02 17:01:27 crc kubenswrapper[4747]: E1202 17:01:27.340412 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" Dec 02 17:01:27 crc kubenswrapper[4747]: E1202 17:01:27.355225 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 02 17:01:27 crc kubenswrapper[4747]: E1202 17:01:27.355501 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qmsrr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(1e1d5029-90ff-4315-8ba4-961286afbb54): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 02 17:01:27 crc kubenswrapper[4747]: E1202 17:01:27.356878 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" Dec 02 17:01:28 crc kubenswrapper[4747]: E1202 17:01:28.225295 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" Dec 02 17:01:28 crc kubenswrapper[4747]: E1202 17:01:28.225727 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.150229 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.150741 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7qz2v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
dnsmasq-dns-675f4bcbfc-sg8gv_openstack(467af242-db5d-49fd-a9cc-06d06d39c0b3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.153509 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" podUID="467af242-db5d-49fd-a9cc-06d06d39c0b3" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.233346 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.233621 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q2k2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-lx4fx_openstack(ee8ab716-dbff-4db7-b44f-de98c75f0dcf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.234900 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" 
podUID="ee8ab716-dbff-4db7-b44f-de98c75f0dcf" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.252680 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" podUID="ee8ab716-dbff-4db7-b44f-de98c75f0dcf" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.301957 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.302177 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xmprk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-8q57d_openstack(686f25cf-1526-4181-b3ad-eeb8c4e16ad6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.304222 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" podUID="686f25cf-1526-4181-b3ad-eeb8c4e16ad6" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.340334 4747 
log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.340784 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dv4tw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-q898h_openstack(5fb917c8-4122-4bd3-8cf8-c0c06e782ea5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:01:31 crc kubenswrapper[4747]: E1202 17:01:31.342129 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-q898h" podUID="5fb917c8-4122-4bd3-8cf8-c0c06e782ea5" Dec 02 17:01:31 crc kubenswrapper[4747]: I1202 17:01:31.935062 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hbnrp"] Dec 02 17:01:31 crc kubenswrapper[4747]: I1202 17:01:31.951304 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lqt7r"] Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.030344 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.040517 4747 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.071400 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qz2v\" (UniqueName: \"kubernetes.io/projected/467af242-db5d-49fd-a9cc-06d06d39c0b3-kube-api-access-7qz2v\") pod \"467af242-db5d-49fd-a9cc-06d06d39c0b3\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.071685 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/467af242-db5d-49fd-a9cc-06d06d39c0b3-config\") pod \"467af242-db5d-49fd-a9cc-06d06d39c0b3\" (UID: \"467af242-db5d-49fd-a9cc-06d06d39c0b3\") " Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.072873 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/467af242-db5d-49fd-a9cc-06d06d39c0b3-config" (OuterVolumeSpecName: "config") pod "467af242-db5d-49fd-a9cc-06d06d39c0b3" (UID: "467af242-db5d-49fd-a9cc-06d06d39c0b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.077425 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/467af242-db5d-49fd-a9cc-06d06d39c0b3-kube-api-access-7qz2v" (OuterVolumeSpecName: "kube-api-access-7qz2v") pod "467af242-db5d-49fd-a9cc-06d06d39c0b3" (UID: "467af242-db5d-49fd-a9cc-06d06d39c0b3"). InnerVolumeSpecName "kube-api-access-7qz2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:01:32 crc kubenswrapper[4747]: W1202 17:01:32.130503 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46dfbb1c_1b65_4f5d_9087_94743cb4c00e.slice/crio-518bf3720fff7ebabee8d6329267b18efd1b8055d4d2d6dcbc2eb77ac81379a4 WatchSource:0}: Error finding container 518bf3720fff7ebabee8d6329267b18efd1b8055d4d2d6dcbc2eb77ac81379a4: Status 404 returned error can't find the container with id 518bf3720fff7ebabee8d6329267b18efd1b8055d4d2d6dcbc2eb77ac81379a4 Dec 02 17:01:32 crc kubenswrapper[4747]: W1202 17:01:32.131044 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecb5b4a2_0d03_464e_87f0_7bbb1cc6f020.slice/crio-aa6222ee9b9d7fe9b6be525759068f80040c0cd6f6398baf14d74a18570a4da7 WatchSource:0}: Error finding container aa6222ee9b9d7fe9b6be525759068f80040c0cd6f6398baf14d74a18570a4da7: Status 404 returned error can't find the container with id aa6222ee9b9d7fe9b6be525759068f80040c0cd6f6398baf14d74a18570a4da7 Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.173502 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/467af242-db5d-49fd-a9cc-06d06d39c0b3-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.173550 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qz2v\" (UniqueName: \"kubernetes.io/projected/467af242-db5d-49fd-a9cc-06d06d39c0b3-kube-api-access-7qz2v\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:32 crc kubenswrapper[4747]: W1202 17:01:32.216246 4747 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb9eddf0_4be4_48f8_bcfe_083310ec9333.slice/crio-6dd245dc37ec60da1bcd6dd0097c1316cf2e75b9773cbfc79915af6f03114896 WatchSource:0}: Error finding container 6dd245dc37ec60da1bcd6dd0097c1316cf2e75b9773cbfc79915af6f03114896: Status 404 returned error can't find the container with id 6dd245dc37ec60da1bcd6dd0097c1316cf2e75b9773cbfc79915af6f03114896 Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.261093 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"11fbad36-d913-4a80-b1db-6f9707f8c370","Type":"ContainerStarted","Data":"6ff0ae317e844f6d473960042b5fe7923e2f73712da33cd321dd92973b199049"} Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.262025 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.262667 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r" event={"ID":"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020","Type":"ContainerStarted","Data":"aa6222ee9b9d7fe9b6be525759068f80040c0cd6f6398baf14d74a18570a4da7"} Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.265683 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" event={"ID":"467af242-db5d-49fd-a9cc-06d06d39c0b3","Type":"ContainerDied","Data":"1e971ce724aa1f4131d9898fc155422db0cae0cb3b18842f7b120142ecc8124f"} Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.265791 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sg8gv" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.268988 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1e65482-6e30-4e82-8c20-5fd991675dba","Type":"ContainerStarted","Data":"1032b52667a29caf97803b6352866579b779a707ef23ea8749fbeba61113fdc3"} Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.271062 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hbnrp" event={"ID":"46dfbb1c-1b65-4f5d-9087-94743cb4c00e","Type":"ContainerStarted","Data":"518bf3720fff7ebabee8d6329267b18efd1b8055d4d2d6dcbc2eb77ac81379a4"} Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.273866 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"de9ec28c-1521-4af6-8473-fe8bf1cabf5d","Type":"ContainerStarted","Data":"475f928b37f0d950f812e8573b28652df496104f236973725f985612d55ab2ab"} Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.281702 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.598457158 podStartE2EDuration="25.281681119s" podCreationTimestamp="2025-12-02 17:01:07 +0000 UTC" firstStartedPulling="2025-12-02 17:01:09.62896685 +0000 UTC m=+1100.155855599" lastFinishedPulling="2025-12-02 17:01:31.312190811 +0000 UTC m=+1121.839079560" observedRunningTime="2025-12-02 17:01:32.27676807 +0000 UTC m=+1122.803656839" watchObservedRunningTime="2025-12-02 17:01:32.281681119 +0000 UTC m=+1122.808569868" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.286167 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"eb9eddf0-4be4-48f8-bcfe-083310ec9333","Type":"ContainerStarted","Data":"6dd245dc37ec60da1bcd6dd0097c1316cf2e75b9773cbfc79915af6f03114896"} Dec 02 17:01:32 crc 
kubenswrapper[4747]: E1202 17:01:32.288575 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-q898h" podUID="5fb917c8-4122-4bd3-8cf8-c0c06e782ea5" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.382764 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sg8gv"] Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.396343 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sg8gv"] Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.713290 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.744712 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.909455 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-dns-svc\") pod \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.909579 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmprk\" (UniqueName: \"kubernetes.io/projected/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-kube-api-access-xmprk\") pod \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.909661 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-config\") pod \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\" (UID: \"686f25cf-1526-4181-b3ad-eeb8c4e16ad6\") " Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.910864 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-config" (OuterVolumeSpecName: "config") pod "686f25cf-1526-4181-b3ad-eeb8c4e16ad6" (UID: "686f25cf-1526-4181-b3ad-eeb8c4e16ad6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.910994 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "686f25cf-1526-4181-b3ad-eeb8c4e16ad6" (UID: "686f25cf-1526-4181-b3ad-eeb8c4e16ad6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:01:32 crc kubenswrapper[4747]: I1202 17:01:32.925213 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-kube-api-access-xmprk" (OuterVolumeSpecName: "kube-api-access-xmprk") pod "686f25cf-1526-4181-b3ad-eeb8c4e16ad6" (UID: "686f25cf-1526-4181-b3ad-eeb8c4e16ad6"). InnerVolumeSpecName "kube-api-access-xmprk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.011268 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.011309 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmprk\" (UniqueName: \"kubernetes.io/projected/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-kube-api-access-xmprk\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.011323 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/686f25cf-1526-4181-b3ad-eeb8c4e16ad6-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.295848 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5a8b10db-030f-4419-9fa8-c500ae646151","Type":"ContainerStarted","Data":"cd853b65b075a08156123046dec1c4483c629b8cc780c42e2d36de5af8126b77"} Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.297843 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.297719 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-8q57d" event={"ID":"686f25cf-1526-4181-b3ad-eeb8c4e16ad6","Type":"ContainerDied","Data":"8e127b1a6b116d8f1bbb55e9bf7706b3a0dc24c420010ef5b8fcb9e2482a4bf3"} Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.356106 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8q57d"] Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.368571 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8q57d"] Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.773341 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="467af242-db5d-49fd-a9cc-06d06d39c0b3" path="/var/lib/kubelet/pods/467af242-db5d-49fd-a9cc-06d06d39c0b3/volumes" Dec 02 17:01:33 crc kubenswrapper[4747]: I1202 17:01:33.774057 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="686f25cf-1526-4181-b3ad-eeb8c4e16ad6" path="/var/lib/kubelet/pods/686f25cf-1526-4181-b3ad-eeb8c4e16ad6/volumes" Dec 02 17:01:34 crc kubenswrapper[4747]: I1202 17:01:34.310380 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fc2871c5-3506-4ae4-9881-8c184d57c2e1","Type":"ContainerStarted","Data":"c05c72e5c32d5b462b338877cf90a47a6481775e0f0255c1117409d13c816105"} Dec 02 17:01:34 crc kubenswrapper[4747]: I1202 17:01:34.325528 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=8.545777126 podStartE2EDuration="25.325500915s" podCreationTimestamp="2025-12-02 17:01:09 +0000 UTC" firstStartedPulling="2025-12-02 17:01:16.531445897 +0000 UTC m=+1107.058334646" lastFinishedPulling="2025-12-02 17:01:33.311169686 +0000 UTC m=+1123.838058435" observedRunningTime="2025-12-02 17:01:34.324382313 +0000 UTC m=+1124.851271072" watchObservedRunningTime="2025-12-02 17:01:34.325500915 +0000 UTC m=+1124.852389674" Dec 02 17:01:35 crc kubenswrapper[4747]: I1202 17:01:35.321205 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="c1e65482-6e30-4e82-8c20-5fd991675dba" containerID="1032b52667a29caf97803b6352866579b779a707ef23ea8749fbeba61113fdc3" exitCode=0 Dec 02 17:01:35 crc kubenswrapper[4747]: I1202 17:01:35.321281 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1e65482-6e30-4e82-8c20-5fd991675dba","Type":"ContainerDied","Data":"1032b52667a29caf97803b6352866579b779a707ef23ea8749fbeba61113fdc3"} Dec 02 17:01:35 crc kubenswrapper[4747]: I1202 17:01:35.324408 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"de9ec28c-1521-4af6-8473-fe8bf1cabf5d","Type":"ContainerDied","Data":"475f928b37f0d950f812e8573b28652df496104f236973725f985612d55ab2ab"} Dec 02 17:01:35 crc kubenswrapper[4747]: I1202 17:01:35.324312 4747 generic.go:334] "Generic (PLEG): container finished" podID="de9ec28c-1521-4af6-8473-fe8bf1cabf5d" containerID="475f928b37f0d950f812e8573b28652df496104f236973725f985612d55ab2ab" exitCode=0 Dec 02 17:01:35 crc kubenswrapper[4747]: I1202 17:01:35.324864 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.335717 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"eb9eddf0-4be4-48f8-bcfe-083310ec9333","Type":"ContainerStarted","Data":"2ad3be15cbc0149d6b80d4d55cbf62b6d086a55b81161190afe5c8d269279a46"} Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.338090 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r" event={"ID":"ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020","Type":"ContainerStarted","Data":"e7bee80c205c3a8124c5d9bb1d2b961e3e9dd3219c47b674c17a9d94cde8e3d4"} Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.338242 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-lqt7r" Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.340614 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c1e65482-6e30-4e82-8c20-5fd991675dba","Type":"ContainerStarted","Data":"4ee745a649c5d7ddb54437014f7bb1aab31657225b9f321f76d8f63c4f3be6b1"} Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.342781 4747 generic.go:334] "Generic (PLEG): container finished" podID="46dfbb1c-1b65-4f5d-9087-94743cb4c00e" containerID="ffb2beb07779cfadd90c3ae1fcb24bd7c2394b52a1330e4db10549150313eba0" exitCode=0 Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.342870 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hbnrp" event={"ID":"46dfbb1c-1b65-4f5d-9087-94743cb4c00e","Type":"ContainerDied","Data":"ffb2beb07779cfadd90c3ae1fcb24bd7c2394b52a1330e4db10549150313eba0"} Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.346458 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"de9ec28c-1521-4af6-8473-fe8bf1cabf5d","Type":"ContainerStarted","Data":"e0e688c71f7c9475705c85ccfb1e4ee119b1531ff74ee9d6a9b3d01162e4bc17"} Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.349120 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5a8b10db-030f-4419-9fa8-c500ae646151","Type":"ContainerStarted","Data":"9bf6951d4bb20351e29d557a1d6aa4d45b13b1c5f2683b8a257b4d3a1fc3c5a6"} Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.368415 4747 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/ovn-controller-lqt7r" podStartSLOduration=20.006447781 podStartE2EDuration="23.368389696s" podCreationTimestamp="2025-12-02 17:01:13 +0000 UTC" firstStartedPulling="2025-12-02 17:01:32.134015454 +0000 UTC m=+1122.660904203" lastFinishedPulling="2025-12-02 17:01:35.495957369 +0000 UTC m=+1126.022846118" observedRunningTime="2025-12-02 17:01:36.361285395 +0000 UTC m=+1126.888174144" watchObservedRunningTime="2025-12-02 17:01:36.368389696 +0000 UTC m=+1126.895278445" Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.389472 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.484228196 podStartE2EDuration="30.389446913s" podCreationTimestamp="2025-12-02 17:01:06 +0000 UTC" firstStartedPulling="2025-12-02 17:01:09.59030417 +0000 UTC m=+1100.117192919" lastFinishedPulling="2025-12-02 17:01:31.495522887 +0000 UTC m=+1122.022411636" observedRunningTime="2025-12-02 17:01:36.382488706 +0000 UTC m=+1126.909377465" watchObservedRunningTime="2025-12-02 17:01:36.389446913 +0000 UTC m=+1126.916335672" Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.441849 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.700899103 podStartE2EDuration="32.441826837s" podCreationTimestamp="2025-12-02 17:01:04 +0000 UTC" firstStartedPulling="2025-12-02 17:01:07.571239207 +0000 UTC m=+1098.098127946" lastFinishedPulling="2025-12-02 17:01:31.312166931 +0000 UTC m=+1121.839055680" observedRunningTime="2025-12-02 17:01:36.435753805 +0000 UTC m=+1126.962642554" watchObservedRunningTime="2025-12-02 17:01:36.441826837 +0000 UTC m=+1126.968715586" Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.736748 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 02 17:01:36 crc kubenswrapper[4747]: I1202 17:01:36.736828 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.049994 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-47dr4"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.051793 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.055347 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.064127 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-47dr4"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.203463 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0e28eacf-a679-45a6-9ab1-065400244faf-ovn-rundir\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.203549 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e28eacf-a679-45a6-9ab1-065400244faf-config\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.203598 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28eacf-a679-45a6-9ab1-065400244faf-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.203674 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28eacf-a679-45a6-9ab1-065400244faf-combined-ca-bundle\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.203756 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0e28eacf-a679-45a6-9ab1-065400244faf-ovs-rundir\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.203792 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nvpc\" (UniqueName: \"kubernetes.io/projected/0e28eacf-a679-45a6-9ab1-065400244faf-kube-api-access-6nvpc\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.230095 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q898h"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.253393 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-2lzln"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.255716 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.261886 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.296244 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-2lzln"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.305414 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28eacf-a679-45a6-9ab1-065400244faf-combined-ca-bundle\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.305567 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0e28eacf-a679-45a6-9ab1-065400244faf-ovs-rundir\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.305613 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nvpc\" (UniqueName: \"kubernetes.io/projected/0e28eacf-a679-45a6-9ab1-065400244faf-kube-api-access-6nvpc\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.305660 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0e28eacf-a679-45a6-9ab1-065400244faf-ovn-rundir\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.305722 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e28eacf-a679-45a6-9ab1-065400244faf-config\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.305765 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28eacf-a679-45a6-9ab1-065400244faf-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.306750 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0e28eacf-a679-45a6-9ab1-065400244faf-ovs-rundir\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.308840 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e28eacf-a679-45a6-9ab1-065400244faf-config\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.309758 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0e28eacf-a679-45a6-9ab1-065400244faf-ovn-rundir\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.325942 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28eacf-a679-45a6-9ab1-065400244faf-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.326732 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28eacf-a679-45a6-9ab1-065400244faf-combined-ca-bundle\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.333806 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nvpc\" (UniqueName: \"kubernetes.io/projected/0e28eacf-a679-45a6-9ab1-065400244faf-kube-api-access-6nvpc\") pod \"ovn-controller-metrics-47dr4\" (UID: \"0e28eacf-a679-45a6-9ab1-065400244faf\") " pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.378016 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hbnrp" event={"ID":"46dfbb1c-1b65-4f5d-9087-94743cb4c00e","Type":"ContainerStarted","Data":"5dbaf417aec18671c20816dc65a1ddd1ddc5618cd429a9df1b584c31a27d5f3d"} Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.378101 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hbnrp" event={"ID":"46dfbb1c-1b65-4f5d-9087-94743cb4c00e","Type":"ContainerStarted","Data":"674d2a4814f884d10f5d0e02604aee2d76d0110e804215778756fe59747c79d3"} Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.404647 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-47dr4" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.405143 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-hbnrp" podStartSLOduration=21.236203676 podStartE2EDuration="24.404825391s" podCreationTimestamp="2025-12-02 17:01:13 +0000 UTC" firstStartedPulling="2025-12-02 17:01:32.132628955 +0000 UTC m=+1122.659517704" lastFinishedPulling="2025-12-02 17:01:35.30125068 +0000 UTC m=+1125.828139419" observedRunningTime="2025-12-02 17:01:37.399899031 +0000 UTC m=+1127.926787780" watchObservedRunningTime="2025-12-02 17:01:37.404825391 +0000 UTC m=+1127.931714150" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.407736 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.407927 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.408130 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-config\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.408207 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xxcz\" (UniqueName: \"kubernetes.io/projected/e47bc103-df85-4101-8780-3fd68722f000-kube-api-access-4xxcz\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.473645 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lx4fx"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.506248 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-g95vq"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.507951 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.509522 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.509598 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-config\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.509643 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xxcz\" (UniqueName: \"kubernetes.io/projected/e47bc103-df85-4101-8780-3fd68722f000-kube-api-access-4xxcz\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.509740 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.510200 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.511454 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-config\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.512042 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.513280 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.547798 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-g95vq"] Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.562539 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xxcz\" (UniqueName: \"kubernetes.io/projected/e47bc103-df85-4101-8780-3fd68722f000-kube-api-access-4xxcz\") pod \"dnsmasq-dns-7f896c8c65-2lzln\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") " pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.605611 4747 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.612002 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-config\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.612071 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.612222 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.612256 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fthg\" (UniqueName: \"kubernetes.io/projected/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-kube-api-access-9fthg\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.612306 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.714193 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.714383 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fthg\" (UniqueName: \"kubernetes.io/projected/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-kube-api-access-9fthg\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.714685 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.714881 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-config\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: 
\"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.715061 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.715783 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-config\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.715898 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.716428 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.722304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.767539 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fthg\" (UniqueName: \"kubernetes.io/projected/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-kube-api-access-9fthg\") pod \"dnsmasq-dns-86db49b7ff-g95vq\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.884239 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:37 crc kubenswrapper[4747]: I1202 17:01:37.960024 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q898h" Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.012358 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.120398 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.120548 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122272 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-dns-svc\") pod \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") "
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122393 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv4tw\" (UniqueName: \"kubernetes.io/projected/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-kube-api-access-dv4tw\") pod \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") "
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122424 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2k2g\" (UniqueName: \"kubernetes.io/projected/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-kube-api-access-q2k2g\") pod \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") "
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122483 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-dns-svc\") pod \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") "
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122501 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-config\") pod \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\" (UID: \"ee8ab716-dbff-4db7-b44f-de98c75f0dcf\") "
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122535 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-config\") pod \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\" (UID: \"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5\") "
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122758 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ee8ab716-dbff-4db7-b44f-de98c75f0dcf" (UID: "ee8ab716-dbff-4db7-b44f-de98c75f0dcf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.122872 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.123208 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-config" (OuterVolumeSpecName: "config") pod "ee8ab716-dbff-4db7-b44f-de98c75f0dcf" (UID: "ee8ab716-dbff-4db7-b44f-de98c75f0dcf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.123219 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5fb917c8-4122-4bd3-8cf8-c0c06e782ea5" (UID: "5fb917c8-4122-4bd3-8cf8-c0c06e782ea5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.127601 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-config" (OuterVolumeSpecName: "config") pod "5fb917c8-4122-4bd3-8cf8-c0c06e782ea5" (UID: "5fb917c8-4122-4bd3-8cf8-c0c06e782ea5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.130130 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-kube-api-access-q2k2g" (OuterVolumeSpecName: "kube-api-access-q2k2g") pod "ee8ab716-dbff-4db7-b44f-de98c75f0dcf" (UID: "ee8ab716-dbff-4db7-b44f-de98c75f0dcf"). InnerVolumeSpecName "kube-api-access-q2k2g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.144215 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-kube-api-access-dv4tw" (OuterVolumeSpecName: "kube-api-access-dv4tw") pod "5fb917c8-4122-4bd3-8cf8-c0c06e782ea5" (UID: "5fb917c8-4122-4bd3-8cf8-c0c06e782ea5"). InnerVolumeSpecName "kube-api-access-dv4tw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.184892 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.224683 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv4tw\" (UniqueName: \"kubernetes.io/projected/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-kube-api-access-dv4tw\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.224728 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2k2g\" (UniqueName: \"kubernetes.io/projected/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-kube-api-access-q2k2g\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.224745 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee8ab716-dbff-4db7-b44f-de98c75f0dcf-config\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.224759 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.224772 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5-config\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.250220 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-47dr4"]
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.285543 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-2lzln"]
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.388460 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q898h" event={"ID":"5fb917c8-4122-4bd3-8cf8-c0c06e782ea5","Type":"ContainerDied","Data":"94a0b5c57e4439915acdc2da4a16880b575732b4666f8db5c2a521f2eaecef3c"}
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.388561 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q898h"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.393636 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" event={"ID":"e47bc103-df85-4101-8780-3fd68722f000","Type":"ContainerStarted","Data":"999c5e987f138d3d2af126937a728f389f63b7fc19497497a03dd9741da7c842"}
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.397170 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-47dr4" event={"ID":"0e28eacf-a679-45a6-9ab1-065400244faf","Type":"ContainerStarted","Data":"7de1b9b86b251a0a2bf77db76ecce36ab7afaeeecc616d46bc0f7ad9fec13eb1"}
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.398717 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx" event={"ID":"ee8ab716-dbff-4db7-b44f-de98c75f0dcf","Type":"ContainerDied","Data":"72d90b8c87965055f80829c2dc08a926399901f35f062118b988e7ec4572fff3"}
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.398886 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-lx4fx"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.400412 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hbnrp"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.400441 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hbnrp"
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.485528 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q898h"]
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.499405 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q898h"]
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.521945 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lx4fx"]
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.537221 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-lx4fx"]
Dec 02 17:01:38 crc kubenswrapper[4747]: I1202 17:01:38.553710 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-g95vq"]
Dec 02 17:01:38 crc kubenswrapper[4747]: W1202 17:01:38.561617 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c7ee886_ad3d_482d_a9e1_1b83fb44ccc8.slice/crio-fb99747c6dc17cf90bc8b53594e8c09c2edd9caed8f11ada3f8a33ccd626e5e4 WatchSource:0}: Error finding container fb99747c6dc17cf90bc8b53594e8c09c2edd9caed8f11ada3f8a33ccd626e5e4: Status 404 returned error can't find the container with id fb99747c6dc17cf90bc8b53594e8c09c2edd9caed8f11ada3f8a33ccd626e5e4
Dec 02 17:01:39 crc kubenswrapper[4747]: I1202 17:01:39.416956 4747 generic.go:334] "Generic (PLEG): container finished" podID="e47bc103-df85-4101-8780-3fd68722f000" containerID="4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de" exitCode=0
Dec 02 17:01:39 crc kubenswrapper[4747]: I1202 17:01:39.417004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" event={"ID":"e47bc103-df85-4101-8780-3fd68722f000","Type":"ContainerDied","Data":"4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de"}
Dec 02 17:01:39 crc kubenswrapper[4747]: I1202 17:01:39.422478 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" event={"ID":"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8","Type":"ContainerStarted","Data":"fb99747c6dc17cf90bc8b53594e8c09c2edd9caed8f11ada3f8a33ccd626e5e4"}
Dec 02 17:01:39 crc kubenswrapper[4747]: I1202 17:01:39.772985 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fb917c8-4122-4bd3-8cf8-c0c06e782ea5" path="/var/lib/kubelet/pods/5fb917c8-4122-4bd3-8cf8-c0c06e782ea5/volumes"
Dec 02 17:01:39 crc kubenswrapper[4747]: I1202 17:01:39.773424 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee8ab716-dbff-4db7-b44f-de98c75f0dcf" path="/var/lib/kubelet/pods/ee8ab716-dbff-4db7-b44f-de98c75f0dcf/volumes"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.086050 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-2lzln"]
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.142957 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-nlq7d"]
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.146721 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.158052 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nlq7d"]
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.181766 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-dns-svc\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.181854 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.183771 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-config\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.183813 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs7p8\" (UniqueName: \"kubernetes.io/projected/4e94e53f-7429-4ad7-86b8-cee6a01b0493-kube-api-access-bs7p8\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.183842 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.293625 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-config\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.293689 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs7p8\" (UniqueName: \"kubernetes.io/projected/4e94e53f-7429-4ad7-86b8-cee6a01b0493-kube-api-access-bs7p8\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.293717 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.293764 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-dns-svc\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.293832 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.294800 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.295174 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.295510 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-dns-svc\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.297017 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-config\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.324122 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs7p8\" (UniqueName: \"kubernetes.io/projected/4e94e53f-7429-4ad7-86b8-cee6a01b0493-kube-api-access-bs7p8\") pod \"dnsmasq-dns-698758b865-nlq7d\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.375303 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.488296 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.786211 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 02 17:01:40 crc kubenswrapper[4747]: I1202 17:01:40.977440 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.246791 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.264785 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.273777 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.273923 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-kxhwb"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.274218 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.274439 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.290368 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.388382 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6251344f-5dcf-4cc8-ae89-85d58abaf41b-cache\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.388510 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95t96\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-kube-api-access-95t96\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.388550 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.388597 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.389031 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6251344f-5dcf-4cc8-ae89-85d58abaf41b-lock\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.490710 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6251344f-5dcf-4cc8-ae89-85d58abaf41b-lock\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.490853 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6251344f-5dcf-4cc8-ae89-85d58abaf41b-cache\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.490996 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95t96\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-kube-api-access-95t96\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.491049 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.491072 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: E1202 17:01:41.491260 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 17:01:41 crc kubenswrapper[4747]: E1202 17:01:41.491279 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.491337 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6251344f-5dcf-4cc8-ae89-85d58abaf41b-lock\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: E1202 17:01:41.491361 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift podName:6251344f-5dcf-4cc8-ae89-85d58abaf41b nodeName:}" failed. No retries permitted until 2025-12-02 17:01:41.991342293 +0000 UTC m=+1132.518231062 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift") pod "swift-storage-0" (UID: "6251344f-5dcf-4cc8-ae89-85d58abaf41b") : configmap "swift-ring-files" not found
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.491520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6251344f-5dcf-4cc8-ae89-85d58abaf41b-cache\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.491540 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.519141 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.541841 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95t96\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-kube-api-access-95t96\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.799280 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-mbqqh"]
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.800518 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mbqqh"]
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.800620 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.803102 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.803403 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.803537 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.900870 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-ring-data-devices\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.900993 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-dispersionconf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.901090 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-scripts\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.901155 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-swiftconf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.901233 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9btf\" (UniqueName: \"kubernetes.io/projected/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-kube-api-access-t9btf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.901320 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-etc-swift\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:41 crc kubenswrapper[4747]: I1202 17:01:41.901562 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-combined-ca-bundle\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003104 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-etc-swift\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003373 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-combined-ca-bundle\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003429 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-ring-data-devices\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003466 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-dispersionconf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003499 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-scripts\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-swiftconf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.003551 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9btf\" (UniqueName: \"kubernetes.io/projected/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-kube-api-access-t9btf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.004309 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-etc-swift\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.008839 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-ring-data-devices\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.009045 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-scripts\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: E1202 17:01:42.011090 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 17:01:42 crc kubenswrapper[4747]: E1202 17:01:42.011109 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 02 17:01:42 crc kubenswrapper[4747]: E1202 17:01:42.011166 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift podName:6251344f-5dcf-4cc8-ae89-85d58abaf41b nodeName:}" failed. No retries permitted until 2025-12-02 17:01:43.011149405 +0000 UTC m=+1133.538038154 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift") pod "swift-storage-0" (UID: "6251344f-5dcf-4cc8-ae89-85d58abaf41b") : configmap "swift-ring-files" not found
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.012875 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-combined-ca-bundle\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.013694 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-dispersionconf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.030786 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-swiftconf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.037855 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9btf\" (UniqueName: \"kubernetes.io/projected/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-kube-api-access-t9btf\") pod \"swift-ring-rebalance-mbqqh\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.124015 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mbqqh"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.481805 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nlq7d"]
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.484565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" event={"ID":"e47bc103-df85-4101-8780-3fd68722f000","Type":"ContainerStarted","Data":"588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58"}
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.484701 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" podUID="e47bc103-df85-4101-8780-3fd68722f000" containerName="dnsmasq-dns" containerID="cri-o://588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58" gracePeriod=10
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.485294 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln"
Dec 02 17:01:42 crc kubenswrapper[4747]: W1202 17:01:42.491980 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e94e53f_7429_4ad7_86b8_cee6a01b0493.slice/crio-909cdccc449ebc05d7873060b80ff7aaeef93cc7288cac16f966bd4d44fac494 WatchSource:0}: Error finding container 909cdccc449ebc05d7873060b80ff7aaeef93cc7288cac16f966bd4d44fac494: Status 404 returned error can't find the container with id 909cdccc449ebc05d7873060b80ff7aaeef93cc7288cac16f966bd4d44fac494
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.526826 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" podStartSLOduration=4.935525242 podStartE2EDuration="5.52677442s" podCreationTimestamp="2025-12-02 17:01:37 +0000 UTC" firstStartedPulling="2025-12-02 17:01:38.293186589 +0000 UTC m=+1128.820075338" lastFinishedPulling="2025-12-02 17:01:38.884435757 +0000 UTC m=+1129.411324516" observedRunningTime="2025-12-02 17:01:42.513622377 +0000 UTC m=+1133.040511126" watchObservedRunningTime="2025-12-02 17:01:42.52677442 +0000 UTC m=+1133.053663169"
Dec 02 17:01:42 crc kubenswrapper[4747]: I1202 17:01:42.791123 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mbqqh"]
Dec 02 17:01:42 crc kubenswrapper[4747]: W1202 17:01:42.808278 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf19a9557_2cbf_4fd9_a4a0_6f13ec2a572b.slice/crio-581ae87a383c40d8bd4a4dea35e4fe05c7884ef8281be7fa230c59fcba73de3c WatchSource:0}: Error finding container 581ae87a383c40d8bd4a4dea35e4fe05c7884ef8281be7fa230c59fcba73de3c: Status 404 returned error can't find the container with id 581ae87a383c40d8bd4a4dea35e4fe05c7884ef8281be7fa230c59fcba73de3c
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.024631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:43 crc kubenswrapper[4747]: E1202 17:01:43.024883 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 17:01:43 crc kubenswrapper[4747]: E1202 17:01:43.024939 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 02 17:01:43 crc kubenswrapper[4747]: E1202 17:01:43.025010 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift podName:6251344f-5dcf-4cc8-ae89-85d58abaf41b nodeName:}" failed. No retries permitted until 2025-12-02 17:01:45.02498202 +0000 UTC m=+1135.551870769 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift") pod "swift-storage-0" (UID: "6251344f-5dcf-4cc8-ae89-85d58abaf41b") : configmap "swift-ring-files" not found
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.101502 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.228983 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xxcz\" (UniqueName: \"kubernetes.io/projected/e47bc103-df85-4101-8780-3fd68722f000-kube-api-access-4xxcz\") pod \"e47bc103-df85-4101-8780-3fd68722f000\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") "
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.229135 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-ovsdbserver-sb\") pod \"e47bc103-df85-4101-8780-3fd68722f000\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") "
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.229426 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-config\") pod \"e47bc103-df85-4101-8780-3fd68722f000\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") "
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.229462 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-dns-svc\") pod \"e47bc103-df85-4101-8780-3fd68722f000\" (UID: \"e47bc103-df85-4101-8780-3fd68722f000\") "
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.240709 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e47bc103-df85-4101-8780-3fd68722f000-kube-api-access-4xxcz" (OuterVolumeSpecName: "kube-api-access-4xxcz") pod "e47bc103-df85-4101-8780-3fd68722f000" (UID: "e47bc103-df85-4101-8780-3fd68722f000"). InnerVolumeSpecName "kube-api-access-4xxcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.275229 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e47bc103-df85-4101-8780-3fd68722f000" (UID: "e47bc103-df85-4101-8780-3fd68722f000"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.291369 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-config" (OuterVolumeSpecName: "config") pod "e47bc103-df85-4101-8780-3fd68722f000" (UID: "e47bc103-df85-4101-8780-3fd68722f000"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.291885 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e47bc103-df85-4101-8780-3fd68722f000" (UID: "e47bc103-df85-4101-8780-3fd68722f000"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.332067 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-config\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.332388 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.332402 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xxcz\" (UniqueName: \"kubernetes.io/projected/e47bc103-df85-4101-8780-3fd68722f000-kube-api-access-4xxcz\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.332415 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e47bc103-df85-4101-8780-3fd68722f000-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.512031 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nlq7d" event={"ID":"4e94e53f-7429-4ad7-86b8-cee6a01b0493","Type":"ContainerStarted","Data":"06146c17b1d3189609edf70f1249e65cd9d38ba89555cb13114ce7c3cd9f8d94"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.512078 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nlq7d" event={"ID":"4e94e53f-7429-4ad7-86b8-cee6a01b0493","Type":"ContainerStarted","Data":"909cdccc449ebc05d7873060b80ff7aaeef93cc7288cac16f966bd4d44fac494"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.513813 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-47dr4" event={"ID":"0e28eacf-a679-45a6-9ab1-065400244faf","Type":"ContainerStarted","Data":"9a213def6620b55dff44652fc970eb9378427f84af92aa728b599b8b41d021b2"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.515126 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mbqqh" event={"ID":"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b","Type":"ContainerStarted","Data":"581ae87a383c40d8bd4a4dea35e4fe05c7884ef8281be7fa230c59fcba73de3c"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.517645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"eb9eddf0-4be4-48f8-bcfe-083310ec9333","Type":"ContainerStarted","Data":"3ab9fadf4a8e09316d0789d8b43950b06e2ba4f53475e64a31c9ad17d6cd51ba"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.521896 4747 generic.go:334] "Generic (PLEG): container finished" podID="e47bc103-df85-4101-8780-3fd68722f000" containerID="588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58" exitCode=0
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.522013 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" event={"ID":"e47bc103-df85-4101-8780-3fd68722f000","Type":"ContainerDied","Data":"588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.522107 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln" event={"ID":"e47bc103-df85-4101-8780-3fd68722f000","Type":"ContainerDied","Data":"999c5e987f138d3d2af126937a728f389f63b7fc19497497a03dd9741da7c842"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.522177 4747 scope.go:117] "RemoveContainer" containerID="588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.522322 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-2lzln"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.537642 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" event={"ID":"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8","Type":"ContainerStarted","Data":"640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.539973 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5a8b10db-030f-4419-9fa8-c500ae646151","Type":"ContainerStarted","Data":"789b3262081fb527b0afebd8712ef126a14766e070dab3ae1ef3c98f45b1b5de"}
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.558566 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=17.673898369 podStartE2EDuration="27.558547073s" podCreationTimestamp="2025-12-02 17:01:16 +0000 UTC" firstStartedPulling="2025-12-02 17:01:32.218762946 +0000 UTC m=+1122.745651695" lastFinishedPulling="2025-12-02 17:01:42.10341165 +0000 UTC m=+1132.630300399" observedRunningTime="2025-12-02 17:01:43.55139736 +0000 UTC m=+1134.078286109" watchObservedRunningTime="2025-12-02 17:01:43.558547073 +0000 UTC m=+1134.085435822"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.607317 4747 scope.go:117] "RemoveContainer" containerID="4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.610047 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=21.77129592 podStartE2EDuration="30.610022712s" podCreationTimestamp="2025-12-02 17:01:13 +0000 UTC" firstStartedPulling="2025-12-02 17:01:33.263492655 +0000 UTC m=+1123.790381404" lastFinishedPulling="2025-12-02 17:01:42.102219447 +0000 UTC m=+1132.629108196" observedRunningTime="2025-12-02 17:01:43.575063031 +0000 UTC m=+1134.101951780" watchObservedRunningTime="2025-12-02 17:01:43.610022712 +0000 UTC m=+1134.136911461"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.616848 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-2lzln"]
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.622685 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-2lzln"]
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.629975 4747 scope.go:117] "RemoveContainer" containerID="588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58"
Dec 02 17:01:43 crc kubenswrapper[4747]: E1202 17:01:43.630651 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58\": container with ID starting with 588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58 not found: ID does not exist" containerID="588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.630813 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58"} err="failed to get container status \"588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58\": rpc error: code = NotFound desc = could not find container \"588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58\": container with ID starting with 588b79a2246a28a9cd6f59c6381161a496695bd914c0ef1d2dd8b0b0cc34de58 not found: ID does not exist"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.630857 4747 scope.go:117] "RemoveContainer" containerID="4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de"
Dec 02 17:01:43 crc kubenswrapper[4747]: E1202 17:01:43.631227 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de\": container with ID starting with 4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de not found: ID does not exist" containerID="4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.631257 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de"} err="failed to get container status \"4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de\": rpc error: code = NotFound desc = could not find container \"4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de\": container with ID starting with 4c4691959306baa9f4f7406c2200688fd935d2ff9c25b5173e31aee5db9503de not found: ID does not exist"
Dec 02 17:01:43 crc kubenswrapper[4747]: I1202 17:01:43.778676 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e47bc103-df85-4101-8780-3fd68722f000" path="/var/lib/kubelet/pods/e47bc103-df85-4101-8780-3fd68722f000/volumes"
Dec 02 17:01:44 crc kubenswrapper[4747]: I1202 17:01:44.499099 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Dec 02 17:01:44 crc kubenswrapper[4747]: I1202 17:01:44.499617 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Dec 02 17:01:44 crc kubenswrapper[4747]: I1202 17:01:44.543099 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Dec 02 17:01:44 crc kubenswrapper[4747]: I1202 17:01:44.601403 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Dec 02 17:01:44 crc kubenswrapper[4747]: I1202 17:01:44.653368 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Dec 02 17:01:44 crc kubenswrapper[4747]: I1202 17:01:44.706104 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.062458 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: E1202 17:01:45.062730 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 17:01:45 crc kubenswrapper[4747]: E1202 17:01:45.062750 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 02 17:01:45 crc kubenswrapper[4747]: E1202 17:01:45.062810 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift podName:6251344f-5dcf-4cc8-ae89-85d58abaf41b nodeName:}" failed. No retries permitted until 2025-12-02 17:01:49.062789907 +0000 UTC m=+1139.589678676 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift") pod "swift-storage-0" (UID: "6251344f-5dcf-4cc8-ae89-85d58abaf41b") : configmap "swift-ring-files" not found
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.562396 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.621582 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.795435 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Dec 02 17:01:45 crc kubenswrapper[4747]: E1202 17:01:45.801311 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e47bc103-df85-4101-8780-3fd68722f000" containerName="init"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.801380 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e47bc103-df85-4101-8780-3fd68722f000" containerName="init"
Dec 02 17:01:45 crc kubenswrapper[4747]: E1202 17:01:45.801444 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e47bc103-df85-4101-8780-3fd68722f000" containerName="dnsmasq-dns"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.801456 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e47bc103-df85-4101-8780-3fd68722f000" containerName="dnsmasq-dns"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.802143 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e47bc103-df85-4101-8780-3fd68722f000" containerName="dnsmasq-dns"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.804372 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.807762 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.814620 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.815448 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.815501 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-gv5p5"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.816010 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.881760 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.882304 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1a525864-cf44-4b73-bc28-7bda185cd5f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.882335 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a525864-cf44-4b73-bc28-7bda185cd5f8-config\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.882373 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a525864-cf44-4b73-bc28-7bda185cd5f8-scripts\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.882540 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6585q\" (UniqueName: \"kubernetes.io/projected/1a525864-cf44-4b73-bc28-7bda185cd5f8-kube-api-access-6585q\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.882751 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.882836 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.985184 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1a525864-cf44-4b73-bc28-7bda185cd5f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.985239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a525864-cf44-4b73-bc28-7bda185cd5f8-config\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.985267 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a525864-cf44-4b73-bc28-7bda185cd5f8-scripts\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.985339 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6585q\" (UniqueName: \"kubernetes.io/projected/1a525864-cf44-4b73-bc28-7bda185cd5f8-kube-api-access-6585q\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.985394 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.985411 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.985431 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.986219 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1a525864-cf44-4b73-bc28-7bda185cd5f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.986561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a525864-cf44-4b73-bc28-7bda185cd5f8-config\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.987214 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1a525864-cf44-4b73-bc28-7bda185cd5f8-scripts\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.992038 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.992206 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:45 crc kubenswrapper[4747]: I1202 17:01:45.993027 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1a525864-cf44-4b73-bc28-7bda185cd5f8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:46 crc kubenswrapper[4747]: I1202 17:01:46.010600 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6585q\" (UniqueName: \"kubernetes.io/projected/1a525864-cf44-4b73-bc28-7bda185cd5f8-kube-api-access-6585q\") pod \"ovn-northd-0\" (UID: \"1a525864-cf44-4b73-bc28-7bda185cd5f8\") " pod="openstack/ovn-northd-0"
Dec 02 17:01:46 crc kubenswrapper[4747]: I1202 17:01:46.141019 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 02 17:01:46 crc kubenswrapper[4747]: I1202 17:01:46.569967 4747 generic.go:334] "Generic (PLEG): container finished" podID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerID="640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820" exitCode=0
Dec 02 17:01:46 crc kubenswrapper[4747]: I1202 17:01:46.570010 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" event={"ID":"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8","Type":"ContainerDied","Data":"640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820"}
Dec 02 17:01:46 crc kubenswrapper[4747]: I1202 17:01:46.575711 4747 generic.go:334] "Generic (PLEG): container finished" podID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerID="06146c17b1d3189609edf70f1249e65cd9d38ba89555cb13114ce7c3cd9f8d94" exitCode=0
Dec 02 17:01:46 crc kubenswrapper[4747]: I1202 17:01:46.576618 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nlq7d" event={"ID":"4e94e53f-7429-4ad7-86b8-cee6a01b0493","Type":"ContainerDied","Data":"06146c17b1d3189609edf70f1249e65cd9d38ba89555cb13114ce7c3cd9f8d94"}
Dec 02 17:01:46 crc kubenswrapper[4747]: I1202 17:01:46.635803 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 02 17:01:46 crc kubenswrapper[4747]: W1202 17:01:46.637317 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a525864_cf44_4b73_bc28_7bda185cd5f8.slice/crio-56db6017ca9a974a2817143d0b35fc6a2bb042bdbafb884b3da81f87bed037ca WatchSource:0}: Error finding container 56db6017ca9a974a2817143d0b35fc6a2bb042bdbafb884b3da81f87bed037ca: Status 404 returned error can't find the container with id 56db6017ca9a974a2817143d0b35fc6a2bb042bdbafb884b3da81f87bed037ca
Dec 02 17:01:47 crc kubenswrapper[4747]: I1202 17:01:47.586039 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1a525864-cf44-4b73-bc28-7bda185cd5f8","Type":"ContainerStarted","Data":"56db6017ca9a974a2817143d0b35fc6a2bb042bdbafb884b3da81f87bed037ca"}
Dec 02 17:01:49 crc kubenswrapper[4747]: I1202 17:01:49.145742 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0"
Dec 02 17:01:49 crc kubenswrapper[4747]: E1202 17:01:49.145935 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 02 17:01:49 crc kubenswrapper[4747]: E1202 17:01:49.146886 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 02 17:01:49 crc kubenswrapper[4747]: E1202 17:01:49.147073 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift podName:6251344f-5dcf-4cc8-ae89-85d58abaf41b nodeName:}" failed. No retries permitted until 2025-12-02 17:01:57.147042545 +0000 UTC m=+1147.673931294 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift") pod "swift-storage-0" (UID: "6251344f-5dcf-4cc8-ae89-85d58abaf41b") : configmap "swift-ring-files" not found
Dec 02 17:01:50 crc kubenswrapper[4747]: I1202 17:01:50.614756 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" event={"ID":"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8","Type":"ContainerStarted","Data":"e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044"}
Dec 02 17:01:50 crc kubenswrapper[4747]: I1202 17:01:50.616921 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nlq7d" event={"ID":"4e94e53f-7429-4ad7-86b8-cee6a01b0493","Type":"ContainerStarted","Data":"4e7d8a7b4e9b616f06849b9ab388e5cb6b7db333062485c6359f48ce32182f6d"}
Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.630341 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"84019d16-aa94-40fc-9615-45c7d3dcb7b3","Type":"ContainerStarted","Data":"7267e15b3d012b4499dfc73930d556fa9134fa3b5d4966724b95da81951561e9"}
Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.630432 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq"
Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.630944 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-nlq7d"
Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.654877 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-47dr4" podStartSLOduration=10.86364048 podStartE2EDuration="14.654856203s" podCreationTimestamp="2025-12-02 17:01:37 +0000 UTC" firstStartedPulling="2025-12-02 17:01:38.272441161 +0000 UTC m=+1128.799329910" lastFinishedPulling="2025-12-02 17:01:42.063656884 +0000 UTC m=+1132.590545633" observedRunningTime="2025-12-02 17:01:51.645493937 +0000 UTC m=+1142.172382686" watchObservedRunningTime="2025-12-02 17:01:51.654856203 +0000 UTC m=+1142.181744972"
Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.684240 4747 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" podStartSLOduration=11.181141719 podStartE2EDuration="14.684211035s" podCreationTimestamp="2025-12-02 17:01:37 +0000 UTC" firstStartedPulling="2025-12-02 17:01:38.56737822 +0000 UTC m=+1129.094266969" lastFinishedPulling="2025-12-02 17:01:42.070447536 +0000 UTC m=+1132.597336285" observedRunningTime="2025-12-02 17:01:51.674082468 +0000 UTC m=+1142.200971217" watchObservedRunningTime="2025-12-02 17:01:51.684211035 +0000 UTC m=+1142.211099784" Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.700056 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-nlq7d" podStartSLOduration=11.700034803 podStartE2EDuration="11.700034803s" podCreationTimestamp="2025-12-02 17:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:01:51.695883156 +0000 UTC m=+1142.222771915" watchObservedRunningTime="2025-12-02 17:01:51.700034803 +0000 UTC m=+1142.226923572" Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.786001 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 02 17:01:51 crc kubenswrapper[4747]: I1202 17:01:51.853879 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 02 17:01:52 crc kubenswrapper[4747]: I1202 17:01:52.640006 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1e1d5029-90ff-4315-8ba4-961286afbb54","Type":"ContainerStarted","Data":"92a581541edb61fddec32c7a8654bf8df5774ac48809da7ae01b0513d57d6723"} Dec 02 17:01:54 crc kubenswrapper[4747]: I1202 17:01:54.672379 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mbqqh" event={"ID":"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b","Type":"ContainerStarted","Data":"4c0614a1275d1c527b0ea25e258a63c5d0659a975e9129585d2f0a409b8ffa02"} Dec 02 17:01:54 crc kubenswrapper[4747]: I1202 17:01:54.702405 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-mbqqh" podStartSLOduration=2.17604077 podStartE2EDuration="13.702379777s" podCreationTimestamp="2025-12-02 17:01:41 +0000 UTC" firstStartedPulling="2025-12-02 17:01:42.811599502 +0000 UTC m=+1133.338488251" lastFinishedPulling="2025-12-02 17:01:54.337938509 +0000 UTC m=+1144.864827258" observedRunningTime="2025-12-02 17:01:54.694853414 +0000 UTC m=+1145.221742183" watchObservedRunningTime="2025-12-02 17:01:54.702379777 +0000 UTC m=+1145.229268536" Dec 02 17:01:55 crc kubenswrapper[4747]: I1202 17:01:55.493151 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-nlq7d" Dec 02 17:01:55 crc kubenswrapper[4747]: I1202 17:01:55.607081 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-g95vq"] Dec 02 17:01:55 crc kubenswrapper[4747]: I1202 17:01:55.607339 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerName="dnsmasq-dns" containerID="cri-o://e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044" gracePeriod=10 Dec 02 17:01:55 crc kubenswrapper[4747]: I1202 17:01:55.614671 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" 
podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Dec 02 17:01:55 crc kubenswrapper[4747]: I1202 17:01:55.683663 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1a525864-cf44-4b73-bc28-7bda185cd5f8","Type":"ContainerStarted","Data":"8325328dcd8f702315d8bf82f8084c197f8ffe63ceba42a1deb12612ae1a6175"} Dec 02 17:01:55 crc kubenswrapper[4747]: I1202 17:01:55.684732 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1a525864-cf44-4b73-bc28-7bda185cd5f8","Type":"ContainerStarted","Data":"5c13516d605bdbf2aa07c6ec2ca52289e6ae7ea92ef90e95ffa159dd979dc1bc"} Dec 02 17:01:55 crc kubenswrapper[4747]: I1202 17:01:55.723360 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.665706261 podStartE2EDuration="10.723330594s" podCreationTimestamp="2025-12-02 17:01:45 +0000 UTC" firstStartedPulling="2025-12-02 17:01:46.639868326 +0000 UTC m=+1137.166757075" lastFinishedPulling="2025-12-02 17:01:54.697492659 +0000 UTC m=+1145.224381408" observedRunningTime="2025-12-02 17:01:55.706357553 +0000 UTC m=+1146.233246322" watchObservedRunningTime="2025-12-02 17:01:55.723330594 +0000 UTC m=+1146.250219343" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.057401 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.142433 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.235800 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-sb\") pod \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.235943 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fthg\" (UniqueName: \"kubernetes.io/projected/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-kube-api-access-9fthg\") pod \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.236095 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-dns-svc\") pod \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.236130 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-nb\") pod \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.236170 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-config\") pod \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\" (UID: \"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8\") " Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.251173 4747 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-kube-api-access-9fthg" (OuterVolumeSpecName: "kube-api-access-9fthg") pod "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" (UID: "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8"). InnerVolumeSpecName "kube-api-access-9fthg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.276837 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" (UID: "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.280713 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" (UID: "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.285147 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-config" (OuterVolumeSpecName: "config") pod "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" (UID: "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.299454 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" (UID: "5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.338387 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.338438 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.338449 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.338460 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fthg\" (UniqueName: \"kubernetes.io/projected/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-kube-api-access-9fthg\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.338472 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.693806 4747 generic.go:334] "Generic (PLEG): container finished" podID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerID="e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044" exitCode=0 Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.693878 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.693878 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" event={"ID":"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8","Type":"ContainerDied","Data":"e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044"} Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.693938 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-g95vq" event={"ID":"5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8","Type":"ContainerDied","Data":"fb99747c6dc17cf90bc8b53594e8c09c2edd9caed8f11ada3f8a33ccd626e5e4"} Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.693960 4747 scope.go:117] "RemoveContainer" containerID="e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.716038 4747 scope.go:117] "RemoveContainer" containerID="640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.739738 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-g95vq"] Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.751466 4747 scope.go:117] "RemoveContainer" containerID="e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044" Dec 02 17:01:56 crc kubenswrapper[4747]: E1202 17:01:56.752011 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044\": container with ID starting with e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044 not found: ID does not exist" 
containerID="e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.752046 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044"} err="failed to get container status \"e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044\": rpc error: code = NotFound desc = could not find container \"e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044\": container with ID starting with e3a44bda2f15ab87e470267825412c15b772bb3fbf2f3806fb231284887c4044 not found: ID does not exist" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.752072 4747 scope.go:117] "RemoveContainer" containerID="640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820" Dec 02 17:01:56 crc kubenswrapper[4747]: E1202 17:01:56.752311 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820\": container with ID starting with 640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820 not found: ID does not exist" containerID="640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.752333 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820"} err="failed to get container status \"640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820\": rpc error: code = NotFound desc = could not find container \"640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820\": container with ID starting with 640a0a82f7c130e4096ec7026a17154268ace62bffb9c6ad49381ac7db777820 not found: ID does not exist" Dec 02 17:01:56 crc kubenswrapper[4747]: I1202 17:01:56.756669 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-g95vq"] Dec 02 17:01:56 crc kubenswrapper[4747]: E1202 17:01:56.903832 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c7ee886_ad3d_482d_a9e1_1b83fb44ccc8.slice/crio-fb99747c6dc17cf90bc8b53594e8c09c2edd9caed8f11ada3f8a33ccd626e5e4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c7ee886_ad3d_482d_a9e1_1b83fb44ccc8.slice\": RecentStats: unable to find data in memory cache]" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.154610 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0" Dec 02 17:01:57 crc kubenswrapper[4747]: E1202 17:01:57.154852 4747 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 17:01:57 crc kubenswrapper[4747]: E1202 17:01:57.154886 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 17:01:57 crc kubenswrapper[4747]: E1202 17:01:57.154971 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift podName:6251344f-5dcf-4cc8-ae89-85d58abaf41b nodeName:}" failed. No retries permitted until 2025-12-02 17:02:13.15494869 +0000 UTC m=+1163.681837439 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift") pod "swift-storage-0" (UID: "6251344f-5dcf-4cc8-ae89-85d58abaf41b") : configmap "swift-ring-files" not found Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.774816 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" path="/var/lib/kubelet/pods/5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8/volumes" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.815045 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-w5brp"] Dec 02 17:01:57 crc kubenswrapper[4747]: E1202 17:01:57.815417 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerName="init" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.815438 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerName="init" Dec 02 17:01:57 crc kubenswrapper[4747]: E1202 17:01:57.815451 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerName="dnsmasq-dns" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.815459 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerName="dnsmasq-dns" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.815690 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c7ee886-ad3d-482d-a9e1-1b83fb44ccc8" containerName="dnsmasq-dns" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.816349 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w5brp" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.825800 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-w5brp"] Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.968576 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm7wh\" (UniqueName: \"kubernetes.io/projected/98c89af3-bb13-47c8-b976-01425ab50970-kube-api-access-nm7wh\") pod \"keystone-db-create-w5brp\" (UID: \"98c89af3-bb13-47c8-b976-01425ab50970\") " pod="openstack/keystone-db-create-w5brp" Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.986958 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-ltp7l"] Dec 02 17:01:57 crc kubenswrapper[4747]: I1202 17:01:57.990844 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-ltp7l" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.015787 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-ltp7l"] Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.071012 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx76h\" (UniqueName: \"kubernetes.io/projected/b8a0631b-2dd4-49ec-895d-2423641e28f7-kube-api-access-lx76h\") pod \"placement-db-create-ltp7l\" (UID: \"b8a0631b-2dd4-49ec-895d-2423641e28f7\") " pod="openstack/placement-db-create-ltp7l" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.071210 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm7wh\" (UniqueName: \"kubernetes.io/projected/98c89af3-bb13-47c8-b976-01425ab50970-kube-api-access-nm7wh\") pod \"keystone-db-create-w5brp\" (UID: \"98c89af3-bb13-47c8-b976-01425ab50970\") " pod="openstack/keystone-db-create-w5brp" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.102678 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm7wh\" (UniqueName: \"kubernetes.io/projected/98c89af3-bb13-47c8-b976-01425ab50970-kube-api-access-nm7wh\") pod \"keystone-db-create-w5brp\" (UID: \"98c89af3-bb13-47c8-b976-01425ab50970\") " pod="openstack/keystone-db-create-w5brp" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.141295 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w5brp" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.182808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx76h\" (UniqueName: \"kubernetes.io/projected/b8a0631b-2dd4-49ec-895d-2423641e28f7-kube-api-access-lx76h\") pod \"placement-db-create-ltp7l\" (UID: \"b8a0631b-2dd4-49ec-895d-2423641e28f7\") " pod="openstack/placement-db-create-ltp7l" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.210796 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx76h\" (UniqueName: \"kubernetes.io/projected/b8a0631b-2dd4-49ec-895d-2423641e28f7-kube-api-access-lx76h\") pod \"placement-db-create-ltp7l\" (UID: \"b8a0631b-2dd4-49ec-895d-2423641e28f7\") " pod="openstack/placement-db-create-ltp7l" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.318277 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ltp7l" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.379783 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-5jbl9"] Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.380891 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5jbl9" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.390685 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5jbl9"] Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.493582 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dftbx\" (UniqueName: \"kubernetes.io/projected/b95920cc-6bf6-4182-b9c2-514a5348e275-kube-api-access-dftbx\") pod \"glance-db-create-5jbl9\" (UID: \"b95920cc-6bf6-4182-b9c2-514a5348e275\") " pod="openstack/glance-db-create-5jbl9" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.595993 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dftbx\" (UniqueName: \"kubernetes.io/projected/b95920cc-6bf6-4182-b9c2-514a5348e275-kube-api-access-dftbx\") pod \"glance-db-create-5jbl9\" (UID: \"b95920cc-6bf6-4182-b9c2-514a5348e275\") " pod="openstack/glance-db-create-5jbl9" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.617777 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dftbx\" (UniqueName: \"kubernetes.io/projected/b95920cc-6bf6-4182-b9c2-514a5348e275-kube-api-access-dftbx\") pod \"glance-db-create-5jbl9\" (UID: \"b95920cc-6bf6-4182-b9c2-514a5348e275\") " pod="openstack/glance-db-create-5jbl9" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.623254 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-w5brp"] Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.714424 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5jbl9" Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.720255 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-w5brp" event={"ID":"98c89af3-bb13-47c8-b976-01425ab50970","Type":"ContainerStarted","Data":"cb3919d61e2326c16f1f2d6666ea061c163b1b7a712c32e071c6465f86606fbe"} Dec 02 17:01:58 crc kubenswrapper[4747]: I1202 17:01:58.815738 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-ltp7l"] Dec 02 17:01:58 crc kubenswrapper[4747]: W1202 17:01:58.817601 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8a0631b_2dd4_49ec_895d_2423641e28f7.slice/crio-b14154e4b785ae1eca950c749b4f201206c7b23f7ec9568f437fcb09001f099e WatchSource:0}: Error finding container b14154e4b785ae1eca950c749b4f201206c7b23f7ec9568f437fcb09001f099e: Status 404 returned error can't find the container with id b14154e4b785ae1eca950c749b4f201206c7b23f7ec9568f437fcb09001f099e Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.142463 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5jbl9"] Dec 02 17:01:59 crc kubenswrapper[4747]: W1202 17:01:59.145170 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb95920cc_6bf6_4182_b9c2_514a5348e275.slice/crio-b8a18d38507f1a7044a6762185a0bf63a519891b64ec7e014b93fc92f3e167ce WatchSource:0}: Error finding container b8a18d38507f1a7044a6762185a0bf63a519891b64ec7e014b93fc92f3e167ce: Status 404 returned error can't find the container with id b8a18d38507f1a7044a6762185a0bf63a519891b64ec7e014b93fc92f3e167ce Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.730443 4747 generic.go:334] "Generic 
(PLEG): container finished" podID="b8a0631b-2dd4-49ec-895d-2423641e28f7" containerID="c2b6241b271b4486d7c17b097eeb5279c76e81e3bcc078689890e85063a4a3c7" exitCode=0 Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.730714 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ltp7l" event={"ID":"b8a0631b-2dd4-49ec-895d-2423641e28f7","Type":"ContainerDied","Data":"c2b6241b271b4486d7c17b097eeb5279c76e81e3bcc078689890e85063a4a3c7"} Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.730743 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ltp7l" event={"ID":"b8a0631b-2dd4-49ec-895d-2423641e28f7","Type":"ContainerStarted","Data":"b14154e4b785ae1eca950c749b4f201206c7b23f7ec9568f437fcb09001f099e"} Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.732831 4747 generic.go:334] "Generic (PLEG): container finished" podID="b95920cc-6bf6-4182-b9c2-514a5348e275" containerID="296eaf44b3c23bd18a39ab14fc9f9e0d7453a34181c3f74f755554daf5d57f70" exitCode=0 Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.732874 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5jbl9" event={"ID":"b95920cc-6bf6-4182-b9c2-514a5348e275","Type":"ContainerDied","Data":"296eaf44b3c23bd18a39ab14fc9f9e0d7453a34181c3f74f755554daf5d57f70"} Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.732893 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5jbl9" event={"ID":"b95920cc-6bf6-4182-b9c2-514a5348e275","Type":"ContainerStarted","Data":"b8a18d38507f1a7044a6762185a0bf63a519891b64ec7e014b93fc92f3e167ce"} Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.734064 4747 generic.go:334] "Generic (PLEG): container finished" podID="98c89af3-bb13-47c8-b976-01425ab50970" containerID="3be40e0899dc07eb1d41f9d34c3bb5ed5165bd3be8a01e08b55b8a19220cd297" exitCode=0 Dec 02 17:01:59 crc kubenswrapper[4747]: I1202 17:01:59.734092 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-w5brp" event={"ID":"98c89af3-bb13-47c8-b976-01425ab50970","Type":"ContainerDied","Data":"3be40e0899dc07eb1d41f9d34c3bb5ed5165bd3be8a01e08b55b8a19220cd297"} Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.147445 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5jbl9" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.219318 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w5brp" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.224599 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ltp7l" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.239241 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dftbx\" (UniqueName: \"kubernetes.io/projected/b95920cc-6bf6-4182-b9c2-514a5348e275-kube-api-access-dftbx\") pod \"b95920cc-6bf6-4182-b9c2-514a5348e275\" (UID: \"b95920cc-6bf6-4182-b9c2-514a5348e275\") " Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.249691 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b95920cc-6bf6-4182-b9c2-514a5348e275-kube-api-access-dftbx" (OuterVolumeSpecName: "kube-api-access-dftbx") pod "b95920cc-6bf6-4182-b9c2-514a5348e275" (UID: "b95920cc-6bf6-4182-b9c2-514a5348e275"). InnerVolumeSpecName "kube-api-access-dftbx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.340786 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx76h\" (UniqueName: \"kubernetes.io/projected/b8a0631b-2dd4-49ec-895d-2423641e28f7-kube-api-access-lx76h\") pod \"b8a0631b-2dd4-49ec-895d-2423641e28f7\" (UID: \"b8a0631b-2dd4-49ec-895d-2423641e28f7\") " Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.341199 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nm7wh\" (UniqueName: \"kubernetes.io/projected/98c89af3-bb13-47c8-b976-01425ab50970-kube-api-access-nm7wh\") pod \"98c89af3-bb13-47c8-b976-01425ab50970\" (UID: \"98c89af3-bb13-47c8-b976-01425ab50970\") " Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.341641 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dftbx\" (UniqueName: \"kubernetes.io/projected/b95920cc-6bf6-4182-b9c2-514a5348e275-kube-api-access-dftbx\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.344175 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8a0631b-2dd4-49ec-895d-2423641e28f7-kube-api-access-lx76h" (OuterVolumeSpecName: "kube-api-access-lx76h") pod "b8a0631b-2dd4-49ec-895d-2423641e28f7" (UID: "b8a0631b-2dd4-49ec-895d-2423641e28f7"). InnerVolumeSpecName "kube-api-access-lx76h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.344635 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98c89af3-bb13-47c8-b976-01425ab50970-kube-api-access-nm7wh" (OuterVolumeSpecName: "kube-api-access-nm7wh") pod "98c89af3-bb13-47c8-b976-01425ab50970" (UID: "98c89af3-bb13-47c8-b976-01425ab50970"). InnerVolumeSpecName "kube-api-access-nm7wh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.443067 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nm7wh\" (UniqueName: \"kubernetes.io/projected/98c89af3-bb13-47c8-b976-01425ab50970-kube-api-access-nm7wh\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.443112 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx76h\" (UniqueName: \"kubernetes.io/projected/b8a0631b-2dd4-49ec-895d-2423641e28f7-kube-api-access-lx76h\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.758551 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-ltp7l" event={"ID":"b8a0631b-2dd4-49ec-895d-2423641e28f7","Type":"ContainerDied","Data":"b14154e4b785ae1eca950c749b4f201206c7b23f7ec9568f437fcb09001f099e"} Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.758943 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b14154e4b785ae1eca950c749b4f201206c7b23f7ec9568f437fcb09001f099e" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.758639 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-ltp7l" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.760424 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5jbl9" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.761671 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w5brp" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.776496 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5jbl9" event={"ID":"b95920cc-6bf6-4182-b9c2-514a5348e275","Type":"ContainerDied","Data":"b8a18d38507f1a7044a6762185a0bf63a519891b64ec7e014b93fc92f3e167ce"} Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.776550 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8a18d38507f1a7044a6762185a0bf63a519891b64ec7e014b93fc92f3e167ce" Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.776568 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-w5brp" event={"ID":"98c89af3-bb13-47c8-b976-01425ab50970","Type":"ContainerDied","Data":"cb3919d61e2326c16f1f2d6666ea061c163b1b7a712c32e071c6465f86606fbe"} Dec 02 17:02:01 crc kubenswrapper[4747]: I1202 17:02:01.776581 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb3919d61e2326c16f1f2d6666ea061c163b1b7a712c32e071c6465f86606fbe" Dec 02 17:02:02 crc kubenswrapper[4747]: I1202 17:02:02.776134 4747 generic.go:334] "Generic (PLEG): container finished" podID="f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" containerID="4c0614a1275d1c527b0ea25e258a63c5d0659a975e9129585d2f0a409b8ffa02" exitCode=0 Dec 02 17:02:02 crc kubenswrapper[4747]: I1202 17:02:02.776251 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mbqqh" event={"ID":"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b","Type":"ContainerDied","Data":"4c0614a1275d1c527b0ea25e258a63c5d0659a975e9129585d2f0a409b8ffa02"} Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.173223 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mbqqh" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.297336 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-scripts\") pod \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.297449 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-etc-swift\") pod \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.297489 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9btf\" (UniqueName: \"kubernetes.io/projected/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-kube-api-access-t9btf\") pod \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.297578 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-combined-ca-bundle\") pod \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.297621 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-swiftconf\") pod \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.297721 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-ring-data-devices\") pod \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.297796 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-dispersionconf\") pod \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\" (UID: \"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b\") " Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.298746 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" (UID: "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.298793 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" (UID: "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.302706 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-kube-api-access-t9btf" (OuterVolumeSpecName: "kube-api-access-t9btf") pod "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" (UID: "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b"). InnerVolumeSpecName "kube-api-access-t9btf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.305611 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" (UID: "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.319838 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" (UID: "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.324669 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" (UID: "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.332546 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-scripts" (OuterVolumeSpecName: "scripts") pod "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" (UID: "f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.400047 4747 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.400246 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.400304 4747 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.400399 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9btf\" (UniqueName: \"kubernetes.io/projected/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-kube-api-access-t9btf\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.400459 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.400515 4747 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.400568 4747 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.794726 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-mbqqh" event={"ID":"f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b","Type":"ContainerDied","Data":"581ae87a383c40d8bd4a4dea35e4fe05c7884ef8281be7fa230c59fcba73de3c"} Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.794779 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="581ae87a383c40d8bd4a4dea35e4fe05c7884ef8281be7fa230c59fcba73de3c" Dec 02 17:02:04 crc kubenswrapper[4747]: I1202 17:02:04.794858 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mbqqh" Dec 02 17:02:06 crc kubenswrapper[4747]: I1202 17:02:06.223393 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.060536 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-0c4c-account-create-kfhgj"] Dec 02 17:02:08 crc kubenswrapper[4747]: E1202 17:02:08.061400 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98c89af3-bb13-47c8-b976-01425ab50970" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061423 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="98c89af3-bb13-47c8-b976-01425ab50970" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: E1202 17:02:08.061437 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95920cc-6bf6-4182-b9c2-514a5348e275" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061446 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95920cc-6bf6-4182-b9c2-514a5348e275" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: E1202 17:02:08.061475 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" containerName="swift-ring-rebalance" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061485 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" containerName="swift-ring-rebalance" Dec 02 17:02:08 crc kubenswrapper[4747]: E1202 17:02:08.061499 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a0631b-2dd4-49ec-895d-2423641e28f7" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061507 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a0631b-2dd4-49ec-895d-2423641e28f7" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061719 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b" containerName="swift-ring-rebalance" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061735 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b95920cc-6bf6-4182-b9c2-514a5348e275" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061754 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8a0631b-2dd4-49ec-895d-2423641e28f7" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.061772 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="98c89af3-bb13-47c8-b976-01425ab50970" containerName="mariadb-database-create" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.062594 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-0c4c-account-create-kfhgj" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.064976 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.071553 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-0c4c-account-create-kfhgj"] Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.169434 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq84m\" (UniqueName: \"kubernetes.io/projected/3ca820aa-feda-455b-9462-da25b82261e5-kube-api-access-sq84m\") pod \"keystone-0c4c-account-create-kfhgj\" (UID: \"3ca820aa-feda-455b-9462-da25b82261e5\") " pod="openstack/keystone-0c4c-account-create-kfhgj" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.245739 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-153d-account-create-xtgz6"] Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.247197 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-153d-account-create-xtgz6" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.249799 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.262032 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-153d-account-create-xtgz6"] Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.271525 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq84m\" (UniqueName: \"kubernetes.io/projected/3ca820aa-feda-455b-9462-da25b82261e5-kube-api-access-sq84m\") pod \"keystone-0c4c-account-create-kfhgj\" (UID: \"3ca820aa-feda-455b-9462-da25b82261e5\") " pod="openstack/keystone-0c4c-account-create-kfhgj" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.298542 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq84m\" (UniqueName: \"kubernetes.io/projected/3ca820aa-feda-455b-9462-da25b82261e5-kube-api-access-sq84m\") pod \"keystone-0c4c-account-create-kfhgj\" (UID: \"3ca820aa-feda-455b-9462-da25b82261e5\") " pod="openstack/keystone-0c4c-account-create-kfhgj" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.373238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dv9b\" (UniqueName: \"kubernetes.io/projected/7c03aa83-6eec-4627-8fd9-a05bbba2c70c-kube-api-access-5dv9b\") pod \"placement-153d-account-create-xtgz6\" (UID: \"7c03aa83-6eec-4627-8fd9-a05bbba2c70c\") " pod="openstack/placement-153d-account-create-xtgz6" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.389650 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-0c4c-account-create-kfhgj" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.457575 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-58f5-account-create-ffwtb"] Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.458941 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-58f5-account-create-ffwtb" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.461410 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.474500 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-58f5-account-create-ffwtb"] Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.474829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dv9b\" (UniqueName: \"kubernetes.io/projected/7c03aa83-6eec-4627-8fd9-a05bbba2c70c-kube-api-access-5dv9b\") pod \"placement-153d-account-create-xtgz6\" (UID: \"7c03aa83-6eec-4627-8fd9-a05bbba2c70c\") " pod="openstack/placement-153d-account-create-xtgz6" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.496109 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dv9b\" (UniqueName: \"kubernetes.io/projected/7c03aa83-6eec-4627-8fd9-a05bbba2c70c-kube-api-access-5dv9b\") pod \"placement-153d-account-create-xtgz6\" (UID: \"7c03aa83-6eec-4627-8fd9-a05bbba2c70c\") " pod="openstack/placement-153d-account-create-xtgz6" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.565773 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-153d-account-create-xtgz6" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.576959 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mhbh\" (UniqueName: \"kubernetes.io/projected/4aa0901c-2ff9-478e-b4f2-b76fe0914cae-kube-api-access-7mhbh\") pod \"glance-58f5-account-create-ffwtb\" (UID: \"4aa0901c-2ff9-478e-b4f2-b76fe0914cae\") " pod="openstack/glance-58f5-account-create-ffwtb" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.678868 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mhbh\" (UniqueName: \"kubernetes.io/projected/4aa0901c-2ff9-478e-b4f2-b76fe0914cae-kube-api-access-7mhbh\") pod \"glance-58f5-account-create-ffwtb\" (UID: \"4aa0901c-2ff9-478e-b4f2-b76fe0914cae\") " pod="openstack/glance-58f5-account-create-ffwtb" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.700527 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mhbh\" (UniqueName: \"kubernetes.io/projected/4aa0901c-2ff9-478e-b4f2-b76fe0914cae-kube-api-access-7mhbh\") pod \"glance-58f5-account-create-ffwtb\" (UID: \"4aa0901c-2ff9-478e-b4f2-b76fe0914cae\") " pod="openstack/glance-58f5-account-create-ffwtb" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.841037 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-58f5-account-create-ffwtb" Dec 02 17:02:08 crc kubenswrapper[4747]: I1202 17:02:08.911093 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-0c4c-account-create-kfhgj"] Dec 02 17:02:08 crc kubenswrapper[4747]: W1202 17:02:08.922418 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ca820aa_feda_455b_9462_da25b82261e5.slice/crio-42ae202b58879b818ee90baa6d007f1ffaca20459a44b02bf8d03977b4b70771 WatchSource:0}: Error finding container 42ae202b58879b818ee90baa6d007f1ffaca20459a44b02bf8d03977b4b70771: Status 404 returned error can't find the container with id 42ae202b58879b818ee90baa6d007f1ffaca20459a44b02bf8d03977b4b70771 Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.003321 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-153d-account-create-xtgz6"] Dec 02 17:02:09 crc kubenswrapper[4747]: W1202 17:02:09.011680 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c03aa83_6eec_4627_8fd9_a05bbba2c70c.slice/crio-a25388269deb2bc570a5feeef778664dd042bbfad2f8dbd2303c18c5e0d63675 WatchSource:0}: Error finding container a25388269deb2bc570a5feeef778664dd042bbfad2f8dbd2303c18c5e0d63675: Status 404 returned error can't find the container with id a25388269deb2bc570a5feeef778664dd042bbfad2f8dbd2303c18c5e0d63675 Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.132613 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-lqt7r" podUID="ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020" containerName="ovn-controller" probeResult="failure" output=< Dec 02 17:02:09 crc kubenswrapper[4747]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 02 17:02:09 crc kubenswrapper[4747]: > Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.161598 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.162528 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hbnrp" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.280758 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-58f5-account-create-ffwtb"] Dec 02 17:02:09 crc kubenswrapper[4747]: W1202 17:02:09.313664 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4aa0901c_2ff9_478e_b4f2_b76fe0914cae.slice/crio-d86d65b07942f17f757e23f29b30c72bc49e455249621f702cc038e7eae02e38 WatchSource:0}: Error finding container d86d65b07942f17f757e23f29b30c72bc49e455249621f702cc038e7eae02e38: Status 404 returned error can't find the container with id d86d65b07942f17f757e23f29b30c72bc49e455249621f702cc038e7eae02e38 Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.395470 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lqt7r-config-ptlph"] Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.396974 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.400235 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.404592 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lqt7r-config-ptlph"] Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.512462 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.512865 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-additional-scripts\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.513016 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run-ovn\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.513202 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-log-ovn\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.513348 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-scripts\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.513484 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lwnk\" (UniqueName: \"kubernetes.io/projected/8ff7f499-e0d7-45f2-a55f-99ccff1af207-kube-api-access-8lwnk\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615300 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-additional-scripts\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615349 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run-ovn\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-log-ovn\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615406 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-scripts\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615440 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lwnk\" (UniqueName: \"kubernetes.io/projected/8ff7f499-e0d7-45f2-a55f-99ccff1af207-kube-api-access-8lwnk\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615556 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615634 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-log-ovn\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.615671 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run-ovn\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.616389 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-additional-scripts\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.617491 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-scripts\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.634413 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lwnk\" (UniqueName: \"kubernetes.io/projected/8ff7f499-e0d7-45f2-a55f-99ccff1af207-kube-api-access-8lwnk\") pod \"ovn-controller-lqt7r-config-ptlph\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.773048 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.839344 4747 generic.go:334] "Generic (PLEG): container finished" podID="3ca820aa-feda-455b-9462-da25b82261e5" containerID="8e078537ea2cc73e2b2a00eaac9899af0dd22b96c4c792ac720a4ececd968c78" exitCode=0 Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.839724 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-0c4c-account-create-kfhgj" event={"ID":"3ca820aa-feda-455b-9462-da25b82261e5","Type":"ContainerDied","Data":"8e078537ea2cc73e2b2a00eaac9899af0dd22b96c4c792ac720a4ececd968c78"} Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.839764 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-0c4c-account-create-kfhgj" event={"ID":"3ca820aa-feda-455b-9462-da25b82261e5","Type":"ContainerStarted","Data":"42ae202b58879b818ee90baa6d007f1ffaca20459a44b02bf8d03977b4b70771"} Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.842156 4747 generic.go:334] "Generic (PLEG): container finished" podID="4aa0901c-2ff9-478e-b4f2-b76fe0914cae" containerID="e5becf69fbc53012b191926c11d9f5eb03767088d56b00a888e3eb02f5cec38a" exitCode=0 Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.842230 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-58f5-account-create-ffwtb" event={"ID":"4aa0901c-2ff9-478e-b4f2-b76fe0914cae","Type":"ContainerDied","Data":"e5becf69fbc53012b191926c11d9f5eb03767088d56b00a888e3eb02f5cec38a"} Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.842262 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-58f5-account-create-ffwtb" event={"ID":"4aa0901c-2ff9-478e-b4f2-b76fe0914cae","Type":"ContainerStarted","Data":"d86d65b07942f17f757e23f29b30c72bc49e455249621f702cc038e7eae02e38"} Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.843885 4747 generic.go:334] "Generic (PLEG): container finished" podID="7c03aa83-6eec-4627-8fd9-a05bbba2c70c" containerID="fe8ff0e6516ddf121c20eb6a803fdb239a61d9e9a4c55527a7139665124ff206" exitCode=0 Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.844791 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-153d-account-create-xtgz6" event={"ID":"7c03aa83-6eec-4627-8fd9-a05bbba2c70c","Type":"ContainerDied","Data":"fe8ff0e6516ddf121c20eb6a803fdb239a61d9e9a4c55527a7139665124ff206"} Dec 02 17:02:09 crc kubenswrapper[4747]: I1202 17:02:09.844893 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-153d-account-create-xtgz6" event={"ID":"7c03aa83-6eec-4627-8fd9-a05bbba2c70c","Type":"ContainerStarted","Data":"a25388269deb2bc570a5feeef778664dd042bbfad2f8dbd2303c18c5e0d63675"} Dec 02 17:02:10 crc kubenswrapper[4747]: I1202 
17:02:10.255608 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lqt7r-config-ptlph"] Dec 02 17:02:10 crc kubenswrapper[4747]: W1202 17:02:10.259041 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ff7f499_e0d7_45f2_a55f_99ccff1af207.slice/crio-e2948d8ccbed0ef3c26c8ce338c1b4d58cdbbe9387eb4ff08af7692e3a64aad9 WatchSource:0}: Error finding container e2948d8ccbed0ef3c26c8ce338c1b4d58cdbbe9387eb4ff08af7692e3a64aad9: Status 404 returned error can't find the container with id e2948d8ccbed0ef3c26c8ce338c1b4d58cdbbe9387eb4ff08af7692e3a64aad9 Dec 02 17:02:10 crc kubenswrapper[4747]: I1202 17:02:10.859004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-ptlph" event={"ID":"8ff7f499-e0d7-45f2-a55f-99ccff1af207","Type":"ContainerStarted","Data":"c34cdfa4c2ef66123fb72191c37c13e6ca03627a6b78dc273fef5849dfd87324"} Dec 02 17:02:10 crc kubenswrapper[4747]: I1202 17:02:10.859597 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-ptlph" event={"ID":"8ff7f499-e0d7-45f2-a55f-99ccff1af207","Type":"ContainerStarted","Data":"e2948d8ccbed0ef3c26c8ce338c1b4d58cdbbe9387eb4ff08af7692e3a64aad9"} Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.429504 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-58f5-account-create-ffwtb" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.436045 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-0c4c-account-create-kfhgj" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.461178 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-153d-account-create-xtgz6" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.558642 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq84m\" (UniqueName: \"kubernetes.io/projected/3ca820aa-feda-455b-9462-da25b82261e5-kube-api-access-sq84m\") pod \"3ca820aa-feda-455b-9462-da25b82261e5\" (UID: \"3ca820aa-feda-455b-9462-da25b82261e5\") " Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.559069 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mhbh\" (UniqueName: \"kubernetes.io/projected/4aa0901c-2ff9-478e-b4f2-b76fe0914cae-kube-api-access-7mhbh\") pod \"4aa0901c-2ff9-478e-b4f2-b76fe0914cae\" (UID: \"4aa0901c-2ff9-478e-b4f2-b76fe0914cae\") " Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.559106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dv9b\" (UniqueName: \"kubernetes.io/projected/7c03aa83-6eec-4627-8fd9-a05bbba2c70c-kube-api-access-5dv9b\") pod \"7c03aa83-6eec-4627-8fd9-a05bbba2c70c\" (UID: \"7c03aa83-6eec-4627-8fd9-a05bbba2c70c\") " Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.564877 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ca820aa-feda-455b-9462-da25b82261e5-kube-api-access-sq84m" (OuterVolumeSpecName: "kube-api-access-sq84m") pod "3ca820aa-feda-455b-9462-da25b82261e5" (UID: "3ca820aa-feda-455b-9462-da25b82261e5"). InnerVolumeSpecName "kube-api-access-sq84m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.565013 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aa0901c-2ff9-478e-b4f2-b76fe0914cae-kube-api-access-7mhbh" (OuterVolumeSpecName: "kube-api-access-7mhbh") pod "4aa0901c-2ff9-478e-b4f2-b76fe0914cae" (UID: "4aa0901c-2ff9-478e-b4f2-b76fe0914cae"). InnerVolumeSpecName "kube-api-access-7mhbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.566628 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c03aa83-6eec-4627-8fd9-a05bbba2c70c-kube-api-access-5dv9b" (OuterVolumeSpecName: "kube-api-access-5dv9b") pod "7c03aa83-6eec-4627-8fd9-a05bbba2c70c" (UID: "7c03aa83-6eec-4627-8fd9-a05bbba2c70c"). InnerVolumeSpecName "kube-api-access-5dv9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.661163 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq84m\" (UniqueName: \"kubernetes.io/projected/3ca820aa-feda-455b-9462-da25b82261e5-kube-api-access-sq84m\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.661204 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mhbh\" (UniqueName: \"kubernetes.io/projected/4aa0901c-2ff9-478e-b4f2-b76fe0914cae-kube-api-access-7mhbh\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.661213 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dv9b\" (UniqueName: \"kubernetes.io/projected/7c03aa83-6eec-4627-8fd9-a05bbba2c70c-kube-api-access-5dv9b\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.867494 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-153d-account-create-xtgz6" event={"ID":"7c03aa83-6eec-4627-8fd9-a05bbba2c70c","Type":"ContainerDied","Data":"a25388269deb2bc570a5feeef778664dd042bbfad2f8dbd2303c18c5e0d63675"} Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.867554 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a25388269deb2bc570a5feeef778664dd042bbfad2f8dbd2303c18c5e0d63675" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.868517 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-153d-account-create-xtgz6" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.871834 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-0c4c-account-create-kfhgj" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.871876 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-0c4c-account-create-kfhgj" event={"ID":"3ca820aa-feda-455b-9462-da25b82261e5","Type":"ContainerDied","Data":"42ae202b58879b818ee90baa6d007f1ffaca20459a44b02bf8d03977b4b70771"} Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.871931 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42ae202b58879b818ee90baa6d007f1ffaca20459a44b02bf8d03977b4b70771" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.875253 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-58f5-account-create-ffwtb" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.875308 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-58f5-account-create-ffwtb" event={"ID":"4aa0901c-2ff9-478e-b4f2-b76fe0914cae","Type":"ContainerDied","Data":"d86d65b07942f17f757e23f29b30c72bc49e455249621f702cc038e7eae02e38"} Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.875382 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d86d65b07942f17f757e23f29b30c72bc49e455249621f702cc038e7eae02e38" Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.878136 4747 generic.go:334] "Generic (PLEG): container finished" podID="8ff7f499-e0d7-45f2-a55f-99ccff1af207" containerID="c34cdfa4c2ef66123fb72191c37c13e6ca03627a6b78dc273fef5849dfd87324" exitCode=0 Dec 02 17:02:11 crc kubenswrapper[4747]: I1202 17:02:11.878174 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-ptlph" event={"ID":"8ff7f499-e0d7-45f2-a55f-99ccff1af207","Type":"ContainerDied","Data":"c34cdfa4c2ef66123fb72191c37c13e6ca03627a6b78dc273fef5849dfd87324"} Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.199617 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.205938 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6251344f-5dcf-4cc8-ae89-85d58abaf41b-etc-swift\") pod \"swift-storage-0\" (UID: \"6251344f-5dcf-4cc8-ae89-85d58abaf41b\") " pod="openstack/swift-storage-0" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.278707 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.402650 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-scripts\") pod \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.402768 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run-ovn\") pod \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.402807 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-additional-scripts\") pod \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.402844 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lwnk\" (UniqueName: \"kubernetes.io/projected/8ff7f499-e0d7-45f2-a55f-99ccff1af207-kube-api-access-8lwnk\") pod \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.403078 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run\") pod \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.403105 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-log-ovn\") pod \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\" (UID: \"8ff7f499-e0d7-45f2-a55f-99ccff1af207\") " Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.403676 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "8ff7f499-e0d7-45f2-a55f-99ccff1af207" (UID: "8ff7f499-e0d7-45f2-a55f-99ccff1af207"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.404048 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "8ff7f499-e0d7-45f2-a55f-99ccff1af207" (UID: "8ff7f499-e0d7-45f2-a55f-99ccff1af207"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.404586 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "8ff7f499-e0d7-45f2-a55f-99ccff1af207" (UID: "8ff7f499-e0d7-45f2-a55f-99ccff1af207"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.404647 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run" (OuterVolumeSpecName: "var-run") pod "8ff7f499-e0d7-45f2-a55f-99ccff1af207" (UID: "8ff7f499-e0d7-45f2-a55f-99ccff1af207"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.404993 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-scripts" (OuterVolumeSpecName: "scripts") pod "8ff7f499-e0d7-45f2-a55f-99ccff1af207" (UID: "8ff7f499-e0d7-45f2-a55f-99ccff1af207"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.408218 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ff7f499-e0d7-45f2-a55f-99ccff1af207-kube-api-access-8lwnk" (OuterVolumeSpecName: "kube-api-access-8lwnk") pod "8ff7f499-e0d7-45f2-a55f-99ccff1af207" (UID: "8ff7f499-e0d7-45f2-a55f-99ccff1af207"). InnerVolumeSpecName "kube-api-access-8lwnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.413790 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.505215 4747 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.505538 4747 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.505548 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lwnk\" (UniqueName: \"kubernetes.io/projected/8ff7f499-e0d7-45f2-a55f-99ccff1af207-kube-api-access-8lwnk\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.505558 4747 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-run\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.505567 4747 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8ff7f499-e0d7-45f2-a55f-99ccff1af207-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.505575 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ff7f499-e0d7-45f2-a55f-99ccff1af207-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599034 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-hkz2r"] Dec 02 17:02:13 crc kubenswrapper[4747]: E1202 17:02:13.599491 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ca820aa-feda-455b-9462-da25b82261e5" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599514 4747 
state_mem.go:107] "Deleted CPUSet assignment" podUID="3ca820aa-feda-455b-9462-da25b82261e5" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: E1202 17:02:13.599530 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c03aa83-6eec-4627-8fd9-a05bbba2c70c" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599536 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c03aa83-6eec-4627-8fd9-a05bbba2c70c" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: E1202 17:02:13.599555 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aa0901c-2ff9-478e-b4f2-b76fe0914cae" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599561 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4aa0901c-2ff9-478e-b4f2-b76fe0914cae" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: E1202 17:02:13.599579 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff7f499-e0d7-45f2-a55f-99ccff1af207" containerName="ovn-config" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599587 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff7f499-e0d7-45f2-a55f-99ccff1af207" containerName="ovn-config" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599763 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff7f499-e0d7-45f2-a55f-99ccff1af207" containerName="ovn-config" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599779 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c03aa83-6eec-4627-8fd9-a05bbba2c70c" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599789 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4aa0901c-2ff9-478e-b4f2-b76fe0914cae" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.599803 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ca820aa-feda-455b-9462-da25b82261e5" containerName="mariadb-account-create" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.600513 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.602627 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.602751 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-5ddkh" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.616076 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hkz2r"] Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.708309 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-combined-ca-bundle\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.708379 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kb58\" (UniqueName: \"kubernetes.io/projected/ba0dc134-3984-4156-8198-7880a8616b44-kube-api-access-6kb58\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.708401 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-config-data\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.708442 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-db-sync-config-data\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.809994 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-config-data\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.810143 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-db-sync-config-data\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.810372 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-combined-ca-bundle\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.810459 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kb58\" (UniqueName: \"kubernetes.io/projected/ba0dc134-3984-4156-8198-7880a8616b44-kube-api-access-6kb58\") pod 
\"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.814982 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-config-data\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.815355 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-combined-ca-bundle\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.820240 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-db-sync-config-data\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.828284 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kb58\" (UniqueName: \"kubernetes.io/projected/ba0dc134-3984-4156-8198-7880a8616b44-kube-api-access-6kb58\") pod \"glance-db-sync-hkz2r\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.903567 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-ptlph" event={"ID":"8ff7f499-e0d7-45f2-a55f-99ccff1af207","Type":"ContainerDied","Data":"e2948d8ccbed0ef3c26c8ce338c1b4d58cdbbe9387eb4ff08af7692e3a64aad9"} Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.903622 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2948d8ccbed0ef3c26c8ce338c1b4d58cdbbe9387eb4ff08af7692e3a64aad9" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.903646 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-ptlph" Dec 02 17:02:13 crc kubenswrapper[4747]: I1202 17:02:13.925196 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.078469 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.085610 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-lqt7r" Dec 02 17:02:14 crc kubenswrapper[4747]: W1202 17:02:14.097965 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6251344f_5dcf_4cc8_ae89_85d58abaf41b.slice/crio-b6985dde964ec96d95ae954274403aa1f18ed99902eb1e3dfbbb59b665f2d892 WatchSource:0}: Error finding container b6985dde964ec96d95ae954274403aa1f18ed99902eb1e3dfbbb59b665f2d892: Status 404 returned error can't find the container with id b6985dde964ec96d95ae954274403aa1f18ed99902eb1e3dfbbb59b665f2d892 Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.380420 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lqt7r-config-ptlph"] Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.387524 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lqt7r-config-ptlph"] Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.449040 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lqt7r-config-h7grk"] Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.450161 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.452755 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.460858 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lqt7r-config-h7grk"] Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.513851 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-hkz2r"] Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.630376 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.630786 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-scripts\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.630813 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-additional-scripts\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.630883 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-log-ovn\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.630928 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run-ovn\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.642838 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l9kl\" (UniqueName: \"kubernetes.io/projected/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-kube-api-access-9l9kl\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.744438 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.744523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-scripts\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.744548 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-additional-scripts\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.744609 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-log-ovn\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.744631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run-ovn\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.744715 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l9kl\" (UniqueName: \"kubernetes.io/projected/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-kube-api-access-9l9kl\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.745407 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.747410 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-scripts\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.747855 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-additional-scripts\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.747940 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-log-ovn\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.747999 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run-ovn\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.775836 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l9kl\" (UniqueName: \"kubernetes.io/projected/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-kube-api-access-9l9kl\") pod \"ovn-controller-lqt7r-config-h7grk\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.917645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"b6985dde964ec96d95ae954274403aa1f18ed99902eb1e3dfbbb59b665f2d892"} Dec 02 17:02:14 crc kubenswrapper[4747]: I1202 17:02:14.919711 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hkz2r" event={"ID":"ba0dc134-3984-4156-8198-7880a8616b44","Type":"ContainerStarted","Data":"55934d18a36454bd0533b1d80e4d2594a427484538848bceef0fa8e08a948ca5"} Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.074799 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.519588 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lqt7r-config-h7grk"] Dec 02 17:02:15 crc kubenswrapper[4747]: W1202 17:02:15.532264 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod050a5e0f_7dce_4e7e_aea9_ceb25db5d71a.slice/crio-5ee4e8941a8917904ac2cae0ad0b8a4865578dbb483b086acd35fd518b519faa WatchSource:0}: Error finding container 5ee4e8941a8917904ac2cae0ad0b8a4865578dbb483b086acd35fd518b519faa: Status 404 returned error can't find the container with id 5ee4e8941a8917904ac2cae0ad0b8a4865578dbb483b086acd35fd518b519faa Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.769090 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ff7f499-e0d7-45f2-a55f-99ccff1af207" path="/var/lib/kubelet/pods/8ff7f499-e0d7-45f2-a55f-99ccff1af207/volumes" Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.932626 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"c6a6a7c36565095da8bc79db984296cd4790cf9f7a766598a8f3321f894e3ad9"} Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.932702 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"36e912afd36f7dbc5f3065765c4b4388173527ca76da5b18488e71a97b6dd9f8"} Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.936017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-h7grk" event={"ID":"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a","Type":"ContainerStarted","Data":"a84d174276f460143d5b1c3ca544fa3eac2ff5e300c56e29fa38cd0957f89aba"} Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.936204 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-h7grk" event={"ID":"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a","Type":"ContainerStarted","Data":"5ee4e8941a8917904ac2cae0ad0b8a4865578dbb483b086acd35fd518b519faa"} Dec 02 17:02:15 crc kubenswrapper[4747]: I1202 17:02:15.968542 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-lqt7r-config-h7grk" podStartSLOduration=1.968517692 podStartE2EDuration="1.968517692s" podCreationTimestamp="2025-12-02 17:02:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:02:15.968346588 +0000 UTC m=+1166.495235327" watchObservedRunningTime="2025-12-02 17:02:15.968517692 +0000 UTC m=+1166.495406441" Dec 02 17:02:16 crc kubenswrapper[4747]: I1202 17:02:16.949252 4747 generic.go:334] "Generic (PLEG): container finished" podID="050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" containerID="a84d174276f460143d5b1c3ca544fa3eac2ff5e300c56e29fa38cd0957f89aba" exitCode=0 Dec 02 17:02:16 crc kubenswrapper[4747]: I1202 17:02:16.949322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-h7grk" event={"ID":"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a","Type":"ContainerDied","Data":"a84d174276f460143d5b1c3ca544fa3eac2ff5e300c56e29fa38cd0957f89aba"} Dec 02 17:02:16 crc kubenswrapper[4747]: I1202 17:02:16.952637 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"9534aac78e9fa8974cdb15fdd8f3fbdf2e64ee998c8a64c3b76856fec3cf5a09"} Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.000858 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"036348cbc0779daea3ea3beff17b5fd5227189cb719aa17584cedd55a4ff934b"} Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.458348 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.622500 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-log-ovn\") pod \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.622610 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-scripts\") pod \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.622629 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" (UID: "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.622719 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-additional-scripts\") pod \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.622798 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9l9kl\" (UniqueName: \"kubernetes.io/projected/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-kube-api-access-9l9kl\") pod \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.622878 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run\") pod \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.622952 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run-ovn\") pod \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\" (UID: \"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a\") " Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.623049 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run" (OuterVolumeSpecName: "var-run") pod "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" (UID: "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a"). 
InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.623145 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" (UID: "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.623480 4747 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.623505 4747 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.623517 4747 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.623557 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" (UID: "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.623931 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-scripts" (OuterVolumeSpecName: "scripts") pod "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" (UID: "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.630813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-kube-api-access-9l9kl" (OuterVolumeSpecName: "kube-api-access-9l9kl") pod "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" (UID: "050a5e0f-7dce-4e7e-aea9-ceb25db5d71a"). InnerVolumeSpecName "kube-api-access-9l9kl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.724708 4747 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.725185 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9l9kl\" (UniqueName: \"kubernetes.io/projected/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-kube-api-access-9l9kl\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:18 crc kubenswrapper[4747]: I1202 17:02:18.725201 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:19 crc kubenswrapper[4747]: I1202 17:02:19.029636 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lqt7r-config-h7grk" event={"ID":"050a5e0f-7dce-4e7e-aea9-ceb25db5d71a","Type":"ContainerDied","Data":"5ee4e8941a8917904ac2cae0ad0b8a4865578dbb483b086acd35fd518b519faa"} Dec 02 17:02:19 crc kubenswrapper[4747]: I1202 17:02:19.029686 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ee4e8941a8917904ac2cae0ad0b8a4865578dbb483b086acd35fd518b519faa" Dec 02 17:02:19 crc kubenswrapper[4747]: I1202 17:02:19.029708 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lqt7r-config-h7grk" Dec 02 17:02:19 crc kubenswrapper[4747]: I1202 17:02:19.052028 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lqt7r-config-h7grk"] Dec 02 17:02:19 crc kubenswrapper[4747]: I1202 17:02:19.062881 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lqt7r-config-h7grk"] Dec 02 17:02:19 crc kubenswrapper[4747]: I1202 17:02:19.774165 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" path="/var/lib/kubelet/pods/050a5e0f-7dce-4e7e-aea9-ceb25db5d71a/volumes" Dec 02 17:02:20 crc kubenswrapper[4747]: I1202 17:02:20.040837 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"0984f9404434a4ab515bc0da228dd1880d5682772df3eac1af9577852ce0daab"} Dec 02 17:02:20 crc kubenswrapper[4747]: I1202 17:02:20.040885 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"c4960d32800abb27b07588ceb3600744fc8428e038df9f5e8b2ce008d82ced0b"} Dec 02 17:02:21 crc kubenswrapper[4747]: I1202 17:02:21.056778 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"1797950e4978b724ceef1b831ad7e59f0e516593d744353b62ce625f1b782edf"} Dec 02 17:02:21 crc kubenswrapper[4747]: I1202 17:02:21.057109 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"f3b75cf307eed07b826f8aeeaaea7156d45ee4e24aa30bb55e6f939df3d8af90"} Dec 02 17:02:23 crc kubenswrapper[4747]: I1202 17:02:23.073561 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerID="7267e15b3d012b4499dfc73930d556fa9134fa3b5d4966724b95da81951561e9" exitCode=0 Dec 02 17:02:23 crc kubenswrapper[4747]: I1202 17:02:23.073794 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"84019d16-aa94-40fc-9615-45c7d3dcb7b3","Type":"ContainerDied","Data":"7267e15b3d012b4499dfc73930d556fa9134fa3b5d4966724b95da81951561e9"} Dec 02 17:02:23 crc kubenswrapper[4747]: I1202 17:02:23.082446 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"88af7fd11543885c95ab815f3ec5b062e1f9005d31789b64cae5d4694b59c242"} Dec 02 17:02:23 crc kubenswrapper[4747]: I1202 17:02:23.082818 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"80f7ee76fb8249c842a0de2201138b8c1715f7c540e644b375444ae304347bf7"} Dec 02 17:02:23 crc kubenswrapper[4747]: I1202 17:02:23.082831 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"332ff6744b81cc9754f526a6fc8648aa2d6c1fcf0700b317e541117594a86cf5"} Dec 02 17:02:24 crc kubenswrapper[4747]: I1202 17:02:24.091341 4747 generic.go:334] "Generic (PLEG): container finished" podID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerID="92a581541edb61fddec32c7a8654bf8df5774ac48809da7ae01b0513d57d6723" exitCode=0 Dec 02 17:02:24 crc kubenswrapper[4747]: I1202 17:02:24.091402 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1e1d5029-90ff-4315-8ba4-961286afbb54","Type":"ContainerDied","Data":"92a581541edb61fddec32c7a8654bf8df5774ac48809da7ae01b0513d57d6723"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.163416 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1e1d5029-90ff-4315-8ba4-961286afbb54","Type":"ContainerStarted","Data":"aa833de0b6ed7ef6b94022a24ff29bef00ec6d77c118291379bb3bd57001aff5"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.165067 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.166914 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"84019d16-aa94-40fc-9615-45c7d3dcb7b3","Type":"ContainerStarted","Data":"925fd0a24be60f6c05b52e5e85786f38ce9e346d6d05cc7f3f7a87f1bb5f50ec"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.167341 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.177953 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"f9bd73093aac2fa4897ca934b2cbf2883190b6200535b992abcd10bc7cf9f17b"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.177987 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"2e56bdf884884047dd424f5e7a08dc88d2dfb98c1420b20bb83c43523597c547"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.177998 4747 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"140cbf66dd243457891bfe2867b38b26d3e9adc31cabaef2d6370762156e0210"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.178009 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6251344f-5dcf-4cc8-ae89-85d58abaf41b","Type":"ContainerStarted","Data":"fd3c046d1d972812c7d82ad22f8fa4956e97a7f2e799b66dc732bd2e8fb98586"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.184837 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hkz2r" event={"ID":"ba0dc134-3984-4156-8198-7880a8616b44","Type":"ContainerStarted","Data":"6558a067452c1230d859787756352a8f9cfc89ce456eed74d70ce9f6c9fa8025"} Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.195944 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371948.658854 podStartE2EDuration="1m28.195922444s" podCreationTimestamp="2025-12-02 17:01:03 +0000 UTC" firstStartedPulling="2025-12-02 17:01:05.607979591 +0000 UTC m=+1096.134868340" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:02:31.186379203 +0000 UTC m=+1181.713267952" watchObservedRunningTime="2025-12-02 17:02:31.195922444 +0000 UTC m=+1181.722811203" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.230193 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=43.310385236 podStartE2EDuration="51.230172186s" podCreationTimestamp="2025-12-02 17:01:40 +0000 UTC" firstStartedPulling="2025-12-02 17:02:14.120578567 +0000 UTC m=+1164.647467316" lastFinishedPulling="2025-12-02 17:02:22.040365517 +0000 UTC m=+1172.567254266" observedRunningTime="2025-12-02 17:02:31.225746181 +0000 UTC m=+1181.752634930" watchObservedRunningTime="2025-12-02 17:02:31.230172186 +0000 UTC m=+1181.757060935" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.260605 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-hkz2r" podStartSLOduration=2.689475135 podStartE2EDuration="18.26058784s" podCreationTimestamp="2025-12-02 17:02:13 +0000 UTC" firstStartedPulling="2025-12-02 17:02:14.543263987 +0000 UTC m=+1165.070152736" lastFinishedPulling="2025-12-02 17:02:30.114376682 +0000 UTC m=+1180.641265441" observedRunningTime="2025-12-02 17:02:31.257237585 +0000 UTC m=+1181.784126334" watchObservedRunningTime="2025-12-02 17:02:31.26058784 +0000 UTC m=+1181.787476589" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.507006 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=52.335571901 podStartE2EDuration="1m28.506985224s" podCreationTimestamp="2025-12-02 17:01:03 +0000 UTC" firstStartedPulling="2025-12-02 17:01:05.897750267 +0000 UTC m=+1096.424639016" lastFinishedPulling="2025-12-02 17:01:42.06916359 +0000 UTC m=+1132.596052339" observedRunningTime="2025-12-02 17:02:31.283450039 +0000 UTC m=+1181.810338788" watchObservedRunningTime="2025-12-02 17:02:31.506985224 +0000 UTC m=+1182.033873973" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.509765 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-pq929"] Dec 02 17:02:31 crc kubenswrapper[4747]: E1202 17:02:31.510105 4747 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" containerName="ovn-config" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.510121 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" containerName="ovn-config" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.510319 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="050a5e0f-7dce-4e7e-aea9-ceb25db5d71a" containerName="ovn-config" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.511192 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.517608 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.518680 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.518878 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.519052 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.519089 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.519144 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsn9f\" (UniqueName: \"kubernetes.io/projected/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-kube-api-access-tsn9f\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.519274 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-config\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.534325 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-pq929"] Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.621047 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.621141 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.621170 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.621210 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsn9f\" (UniqueName: \"kubernetes.io/projected/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-kube-api-access-tsn9f\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.621244 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-config\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.621306 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.622267 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.622796 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.623341 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.623892 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-config\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.624449 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.642790 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsn9f\" (UniqueName: \"kubernetes.io/projected/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-kube-api-access-tsn9f\") pod \"dnsmasq-dns-77585f5f8c-pq929\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:31 crc kubenswrapper[4747]: I1202 17:02:31.845428 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:32 crc kubenswrapper[4747]: I1202 17:02:32.383472 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-pq929"] Dec 02 17:02:33 crc kubenswrapper[4747]: I1202 17:02:33.203659 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" event={"ID":"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7","Type":"ContainerStarted","Data":"b7a1f573a815835e269e7377e92ff7dde2c8e09d6fbcd6ef864f345c03ea0bce"} Dec 02 17:02:34 crc kubenswrapper[4747]: I1202 17:02:34.213176 4747 generic.go:334] "Generic (PLEG): container finished" podID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerID="44856576264b0f4fd85137ff919e55d55c15a42436dc529a3fc8e1261a09442e" exitCode=0 Dec 02 17:02:34 crc kubenswrapper[4747]: I1202 17:02:34.213268 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" event={"ID":"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7","Type":"ContainerDied","Data":"44856576264b0f4fd85137ff919e55d55c15a42436dc529a3fc8e1261a09442e"} Dec 02 17:02:35 crc kubenswrapper[4747]: I1202 17:02:35.226626 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" event={"ID":"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7","Type":"ContainerStarted","Data":"f62ef644898daa8e6c778da393cb7a04f71b8b12718b54f1fe528d9be108ccf1"} Dec 02 17:02:35 crc kubenswrapper[4747]: I1202 17:02:35.227179 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:35 crc kubenswrapper[4747]: I1202 17:02:35.278070 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" podStartSLOduration=4.278042567 podStartE2EDuration="4.278042567s" podCreationTimestamp="2025-12-02 17:02:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:02:35.266813598 +0000 UTC m=+1185.793702357" watchObservedRunningTime="2025-12-02 17:02:35.278042567 +0000 UTC m=+1185.804931336" Dec 02 17:02:39 crc kubenswrapper[4747]: I1202 17:02:39.264048 4747 generic.go:334] "Generic (PLEG): container finished" podID="ba0dc134-3984-4156-8198-7880a8616b44" containerID="6558a067452c1230d859787756352a8f9cfc89ce456eed74d70ce9f6c9fa8025" exitCode=0 Dec 02 17:02:39 crc 
kubenswrapper[4747]: I1202 17:02:39.264253 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hkz2r" event={"ID":"ba0dc134-3984-4156-8198-7880a8616b44","Type":"ContainerDied","Data":"6558a067452c1230d859787756352a8f9cfc89ce456eed74d70ce9f6c9fa8025"} Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.737054 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.859458 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-combined-ca-bundle\") pod \"ba0dc134-3984-4156-8198-7880a8616b44\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.859554 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-config-data\") pod \"ba0dc134-3984-4156-8198-7880a8616b44\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.859792 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kb58\" (UniqueName: \"kubernetes.io/projected/ba0dc134-3984-4156-8198-7880a8616b44-kube-api-access-6kb58\") pod \"ba0dc134-3984-4156-8198-7880a8616b44\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.859834 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-db-sync-config-data\") pod \"ba0dc134-3984-4156-8198-7880a8616b44\" (UID: \"ba0dc134-3984-4156-8198-7880a8616b44\") " Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.867085 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ba0dc134-3984-4156-8198-7880a8616b44" (UID: "ba0dc134-3984-4156-8198-7880a8616b44"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.868352 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba0dc134-3984-4156-8198-7880a8616b44-kube-api-access-6kb58" (OuterVolumeSpecName: "kube-api-access-6kb58") pod "ba0dc134-3984-4156-8198-7880a8616b44" (UID: "ba0dc134-3984-4156-8198-7880a8616b44"). InnerVolumeSpecName "kube-api-access-6kb58". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.887949 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba0dc134-3984-4156-8198-7880a8616b44" (UID: "ba0dc134-3984-4156-8198-7880a8616b44"). InnerVolumeSpecName "combined-ca-bundle". 
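A few entries up, pod_startup_latency_tracker reports podStartSLOduration=-9223371948.658854 for rabbitmq-server-0, next to lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC". The healthy swift-storage-0 line is consistent with podStartSLOduration being the E2E duration minus the image-pull window: 51.230172186s minus (17:02:22.040365517 - 17:02:14.120578567) gives 43.310385236s, exactly as logged. With lastFinishedPulling stuck at the zero time, the pull-window subtraction overflows int64 nanoseconds (Go's time.Time.Sub saturates at math.MinInt64 in that case), and math.MinInt64 plus the 1m28.195922444s E2E value reproduces the negative number to the digit. A back-of-the-envelope reconstruction — not kubelet's actual code path:

    package main

    import (
        "fmt"
        "math"
        "time"
    )

    func main() {
        // Healthy case, numbers straight from the swift-storage-0 entry.
        e2eSwift, _ := time.ParseDuration("51.230172186s")
        pullSwift, _ := time.ParseDuration("7.919786950s") // 17:02:14.120578567 -> 17:02:22.040365517
        fmt.Println((e2eSwift - pullSwift).Seconds()) // 43.310385236, as logged

        // Broken case: the pull window saturated at math.MinInt64 nanoseconds.
        e2eRabbit, _ := time.ParseDuration("1m28.195922444s")
        saturated := time.Duration(math.MinInt64)
        fmt.Println((saturated + e2eRabbit).Seconds()) // ~ -9223371948.658854, as logged
    }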
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.909381 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-config-data" (OuterVolumeSpecName: "config-data") pod "ba0dc134-3984-4156-8198-7880a8616b44" (UID: "ba0dc134-3984-4156-8198-7880a8616b44"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.962200 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.962633 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.962681 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kb58\" (UniqueName: \"kubernetes.io/projected/ba0dc134-3984-4156-8198-7880a8616b44-kube-api-access-6kb58\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:40 crc kubenswrapper[4747]: I1202 17:02:40.962698 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ba0dc134-3984-4156-8198-7880a8616b44-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.282680 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-hkz2r" event={"ID":"ba0dc134-3984-4156-8198-7880a8616b44","Type":"ContainerDied","Data":"55934d18a36454bd0533b1d80e4d2594a427484538848bceef0fa8e08a948ca5"} Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.282950 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55934d18a36454bd0533b1d80e4d2594a427484538848bceef0fa8e08a948ca5" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.282790 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-hkz2r" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.728240 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-pq929"] Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.728716 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerName="dnsmasq-dns" containerID="cri-o://f62ef644898daa8e6c778da393cb7a04f71b8b12718b54f1fe528d9be108ccf1" gracePeriod=10 Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.730073 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.779205 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-7cgwn"] Dec 02 17:02:41 crc kubenswrapper[4747]: E1202 17:02:41.779668 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba0dc134-3984-4156-8198-7880a8616b44" containerName="glance-db-sync" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.779683 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba0dc134-3984-4156-8198-7880a8616b44" containerName="glance-db-sync" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.779948 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba0dc134-3984-4156-8198-7880a8616b44" containerName="glance-db-sync" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.780940 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.791290 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-7cgwn"] Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.846163 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.880581 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.880685 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.880738 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.880823 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-config\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.880843 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.983218 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-config\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.983262 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.983300 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.983342 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.983382 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.983430 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wggzt\" (UniqueName: \"kubernetes.io/projected/6137dd81-a215-4037-ade0-f3b2ddb4573b-kube-api-access-wggzt\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.984645 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-config\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.984677 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.984679 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.985040 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:41 crc kubenswrapper[4747]: I1202 17:02:41.985274 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.084885 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wggzt\" (UniqueName: \"kubernetes.io/projected/6137dd81-a215-4037-ade0-f3b2ddb4573b-kube-api-access-wggzt\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.121800 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wggzt\" (UniqueName: \"kubernetes.io/projected/6137dd81-a215-4037-ade0-f3b2ddb4573b-kube-api-access-wggzt\") pod \"dnsmasq-dns-7ff5475cc9-7cgwn\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") " pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.197779 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.295238 4747 generic.go:334] "Generic (PLEG): container finished" podID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerID="f62ef644898daa8e6c778da393cb7a04f71b8b12718b54f1fe528d9be108ccf1" exitCode=0 Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.295300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" event={"ID":"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7","Type":"ContainerDied","Data":"f62ef644898daa8e6c778da393cb7a04f71b8b12718b54f1fe528d9be108ccf1"} Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.295335 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" event={"ID":"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7","Type":"ContainerDied","Data":"b7a1f573a815835e269e7377e92ff7dde2c8e09d6fbcd6ef864f345c03ea0bce"} Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.295351 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7a1f573a815835e269e7377e92ff7dde2c8e09d6fbcd6ef864f345c03ea0bce" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.337775 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.490269 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-config\") pod \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.490437 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-nb\") pod \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.490489 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsn9f\" (UniqueName: \"kubernetes.io/projected/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-kube-api-access-tsn9f\") pod \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.490618 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-swift-storage-0\") pod \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.490712 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-svc\") pod \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.490761 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-sb\") pod \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\" (UID: \"2f1929de-e8ed-41a1-b8a5-e947b2ce22e7\") " Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.497411 4747 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-kube-api-access-tsn9f" (OuterVolumeSpecName: "kube-api-access-tsn9f") pod "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" (UID: "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7"). InnerVolumeSpecName "kube-api-access-tsn9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.535400 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" (UID: "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.537156 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" (UID: "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.539842 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-config" (OuterVolumeSpecName: "config") pod "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" (UID: "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.540975 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" (UID: "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.541990 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" (UID: "2f1929de-e8ed-41a1-b8a5-e947b2ce22e7"). InnerVolumeSpecName "ovsdbserver-nb". 
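This stretch records a config rollout handoff: the replacement pod dnsmasq-dns-7ff5475cc9-7cgwn is admitted and its volumes mounted while the old dnsmasq-dns-77585f5f8c-pq929 is killed with gracePeriod=10, fails its readiness probe once nothing listens on 10.217.0.125:5353, exits 0, and has its volumes torn down above; the differing hash segments in the two pod names are consistent with two ReplicaSets of one Deployment. The kill entry names its target as containerID="cri-o://f62ef644...", a runtime-prefixed ID; splitting the prefix off is handy when cross-checking against the CRI runtime, e.g. with crictl. A trivial illustration, assuming only the <runtime>://<hex> shape seen in this log:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        // containerID copied from the "Killing container with a grace period" entry above.
        id := "cri-o://f62ef644898daa8e6c778da393cb7a04f71b8b12718b54f1fe528d9be108ccf1"
        runtime, hex, ok := strings.Cut(id, "://")
        if !ok {
            panic("unexpected containerID format: " + id)
        }
        fmt.Println(runtime)  // cri-o
        fmt.Println(hex[:12]) // short prefix, for eyeballing truncated crictl output
    }

The W "Failed to process watch event ... Status 404" entries around these pod starts appear to be cadvisor noticing a new crio-<id> cgroup before it can resolve the container, a race that is typically benign during pod churn.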
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.592770 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.592806 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.592817 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.592830 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.592838 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.592850 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsn9f\" (UniqueName: \"kubernetes.io/projected/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7-kube-api-access-tsn9f\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:42 crc kubenswrapper[4747]: I1202 17:02:42.635182 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-7cgwn"] Dec 02 17:02:42 crc kubenswrapper[4747]: W1202 17:02:42.637442 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6137dd81_a215_4037_ade0_f3b2ddb4573b.slice/crio-b953c288c475fa6453218f9e24df16c16a4344bfa430c1ec5fdee7665e56e9a2 WatchSource:0}: Error finding container b953c288c475fa6453218f9e24df16c16a4344bfa430c1ec5fdee7665e56e9a2: Status 404 returned error can't find the container with id b953c288c475fa6453218f9e24df16c16a4344bfa430c1ec5fdee7665e56e9a2 Dec 02 17:02:43 crc kubenswrapper[4747]: I1202 17:02:43.332074 4747 generic.go:334] "Generic (PLEG): container finished" podID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerID="b535c9317f227705f8479c998e9627ee826c457ee4786e28f6dcf0694deaf271" exitCode=0 Dec 02 17:02:43 crc kubenswrapper[4747]: I1202 17:02:43.332179 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" event={"ID":"6137dd81-a215-4037-ade0-f3b2ddb4573b","Type":"ContainerDied","Data":"b535c9317f227705f8479c998e9627ee826c457ee4786e28f6dcf0694deaf271"} Dec 02 17:02:43 crc kubenswrapper[4747]: I1202 17:02:43.332477 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" event={"ID":"6137dd81-a215-4037-ade0-f3b2ddb4573b","Type":"ContainerStarted","Data":"b953c288c475fa6453218f9e24df16c16a4344bfa430c1ec5fdee7665e56e9a2"} Dec 02 17:02:43 crc kubenswrapper[4747]: I1202 17:02:43.332500 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-pq929" Dec 02 17:02:43 crc kubenswrapper[4747]: I1202 17:02:43.530347 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-pq929"] Dec 02 17:02:43 crc kubenswrapper[4747]: I1202 17:02:43.536744 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-pq929"] Dec 02 17:02:43 crc kubenswrapper[4747]: I1202 17:02:43.770869 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" path="/var/lib/kubelet/pods/2f1929de-e8ed-41a1-b8a5-e947b2ce22e7/volumes" Dec 02 17:02:44 crc kubenswrapper[4747]: I1202 17:02:44.344459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" event={"ID":"6137dd81-a215-4037-ade0-f3b2ddb4573b","Type":"ContainerStarted","Data":"1cf16f0c75df1a85e8936e861c0d671f1208acbbe1bc7af03fd232faea770196"} Dec 02 17:02:44 crc kubenswrapper[4747]: I1202 17:02:44.344978 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:44 crc kubenswrapper[4747]: I1202 17:02:44.369077 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" podStartSLOduration=3.369045992 podStartE2EDuration="3.369045992s" podCreationTimestamp="2025-12-02 17:02:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:02:44.366991094 +0000 UTC m=+1194.893879853" watchObservedRunningTime="2025-12-02 17:02:44.369045992 +0000 UTC m=+1194.895934751" Dec 02 17:02:44 crc kubenswrapper[4747]: I1202 17:02:44.949155 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.059136 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.374894 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-l8kgp"] Dec 02 17:02:45 crc kubenswrapper[4747]: E1202 17:02:45.376062 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerName="dnsmasq-dns" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.376146 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerName="dnsmasq-dns" Dec 02 17:02:45 crc kubenswrapper[4747]: E1202 17:02:45.376234 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerName="init" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.376295 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerName="init" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.376548 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f1929de-e8ed-41a1-b8a5-e947b2ce22e7" containerName="dnsmasq-dns" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.377255 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-l8kgp" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.396934 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-l8kgp"] Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.457662 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpkrw\" (UniqueName: \"kubernetes.io/projected/16809698-14ec-49bf-81cf-a4aab9e6dd62-kube-api-access-jpkrw\") pod \"cinder-db-create-l8kgp\" (UID: \"16809698-14ec-49bf-81cf-a4aab9e6dd62\") " pod="openstack/cinder-db-create-l8kgp" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.471219 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-nbl78"] Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.472322 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nbl78" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.560246 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r99c\" (UniqueName: \"kubernetes.io/projected/6bf036a6-0cd6-4f08-a963-bb63f45d14a6-kube-api-access-7r99c\") pod \"barbican-db-create-nbl78\" (UID: \"6bf036a6-0cd6-4f08-a963-bb63f45d14a6\") " pod="openstack/barbican-db-create-nbl78" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.560604 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpkrw\" (UniqueName: \"kubernetes.io/projected/16809698-14ec-49bf-81cf-a4aab9e6dd62-kube-api-access-jpkrw\") pod \"cinder-db-create-l8kgp\" (UID: \"16809698-14ec-49bf-81cf-a4aab9e6dd62\") " pod="openstack/cinder-db-create-l8kgp" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.586881 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nbl78"] Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.599171 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpkrw\" (UniqueName: \"kubernetes.io/projected/16809698-14ec-49bf-81cf-a4aab9e6dd62-kube-api-access-jpkrw\") pod \"cinder-db-create-l8kgp\" (UID: \"16809698-14ec-49bf-81cf-a4aab9e6dd62\") " pod="openstack/cinder-db-create-l8kgp" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.655350 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-fpmlm"] Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.656657 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.662828 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r99c\" (UniqueName: \"kubernetes.io/projected/6bf036a6-0cd6-4f08-a963-bb63f45d14a6-kube-api-access-7r99c\") pod \"barbican-db-create-nbl78\" (UID: \"6bf036a6-0cd6-4f08-a963-bb63f45d14a6\") " pod="openstack/barbican-db-create-nbl78" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.663711 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.663965 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.664091 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.664349 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zr9j" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.667279 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fpmlm"] Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.684558 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r99c\" (UniqueName: \"kubernetes.io/projected/6bf036a6-0cd6-4f08-a963-bb63f45d14a6-kube-api-access-7r99c\") pod \"barbican-db-create-nbl78\" (UID: \"6bf036a6-0cd6-4f08-a963-bb63f45d14a6\") " pod="openstack/barbican-db-create-nbl78" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.706663 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-l8kgp" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.764997 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vkzl\" (UniqueName: \"kubernetes.io/projected/58d94d7e-5759-47cf-9920-db9d5fa862b3-kube-api-access-8vkzl\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.765074 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-config-data\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.765108 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-combined-ca-bundle\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.784798 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-8xlw7"] Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.785831 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8xlw7" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.790649 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-nbl78" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.793124 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8xlw7"] Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.866525 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vkzl\" (UniqueName: \"kubernetes.io/projected/58d94d7e-5759-47cf-9920-db9d5fa862b3-kube-api-access-8vkzl\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.866613 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhfch\" (UniqueName: \"kubernetes.io/projected/63d1c8ec-685f-46b7-859f-b32d265f3504-kube-api-access-rhfch\") pod \"neutron-db-create-8xlw7\" (UID: \"63d1c8ec-685f-46b7-859f-b32d265f3504\") " pod="openstack/neutron-db-create-8xlw7" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.866682 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-config-data\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.866715 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-combined-ca-bundle\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.876126 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-config-data\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.879943 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-combined-ca-bundle\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.885415 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vkzl\" (UniqueName: \"kubernetes.io/projected/58d94d7e-5759-47cf-9920-db9d5fa862b3-kube-api-access-8vkzl\") pod \"keystone-db-sync-fpmlm\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.968629 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhfch\" (UniqueName: \"kubernetes.io/projected/63d1c8ec-685f-46b7-859f-b32d265f3504-kube-api-access-rhfch\") pod \"neutron-db-create-8xlw7\" (UID: \"63d1c8ec-685f-46b7-859f-b32d265f3504\") " pod="openstack/neutron-db-create-8xlw7" Dec 02 17:02:45 crc kubenswrapper[4747]: I1202 17:02:45.989657 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhfch\" (UniqueName: \"kubernetes.io/projected/63d1c8ec-685f-46b7-859f-b32d265f3504-kube-api-access-rhfch\") pod 
\"neutron-db-create-8xlw7\" (UID: \"63d1c8ec-685f-46b7-859f-b32d265f3504\") " pod="openstack/neutron-db-create-8xlw7" Dec 02 17:02:46 crc kubenswrapper[4747]: I1202 17:02:46.099532 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:02:46 crc kubenswrapper[4747]: I1202 17:02:46.112975 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8xlw7" Dec 02 17:02:46 crc kubenswrapper[4747]: I1202 17:02:46.327565 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-l8kgp"] Dec 02 17:02:46 crc kubenswrapper[4747]: W1202 17:02:46.371213 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16809698_14ec_49bf_81cf_a4aab9e6dd62.slice/crio-1688761515da10324c84514f0db04475699f5b6cf98e50dd0914f7afcdb043ad WatchSource:0}: Error finding container 1688761515da10324c84514f0db04475699f5b6cf98e50dd0914f7afcdb043ad: Status 404 returned error can't find the container with id 1688761515da10324c84514f0db04475699f5b6cf98e50dd0914f7afcdb043ad Dec 02 17:02:46 crc kubenswrapper[4747]: I1202 17:02:46.655341 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nbl78"] Dec 02 17:02:46 crc kubenswrapper[4747]: W1202 17:02:46.719503 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bf036a6_0cd6_4f08_a963_bb63f45d14a6.slice/crio-c1b5c4b9d30c2ad65832bc83472e35d792906e3fa84ddefa112498f6f7fa2178 WatchSource:0}: Error finding container c1b5c4b9d30c2ad65832bc83472e35d792906e3fa84ddefa112498f6f7fa2178: Status 404 returned error can't find the container with id c1b5c4b9d30c2ad65832bc83472e35d792906e3fa84ddefa112498f6f7fa2178 Dec 02 17:02:46 crc kubenswrapper[4747]: I1202 17:02:46.776534 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fpmlm"] Dec 02 17:02:46 crc kubenswrapper[4747]: I1202 17:02:46.863162 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8xlw7"] Dec 02 17:02:46 crc kubenswrapper[4747]: W1202 17:02:46.879125 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63d1c8ec_685f_46b7_859f_b32d265f3504.slice/crio-088916f3ad992ebaa4f3aed14cdb138050a492a747da1ffdb595a9d751a1f213 WatchSource:0}: Error finding container 088916f3ad992ebaa4f3aed14cdb138050a492a747da1ffdb595a9d751a1f213: Status 404 returned error can't find the container with id 088916f3ad992ebaa4f3aed14cdb138050a492a747da1ffdb595a9d751a1f213 Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.380512 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fpmlm" event={"ID":"58d94d7e-5759-47cf-9920-db9d5fa862b3","Type":"ContainerStarted","Data":"cabbbed3da865d21de9b123bfac26135ed223498a96854d55d39644315112699"} Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.381991 4747 generic.go:334] "Generic (PLEG): container finished" podID="16809698-14ec-49bf-81cf-a4aab9e6dd62" containerID="bf5b9b8a0d33ba8b010d22ef8019aba459d24d05edd4531489345c7d01bdeb5e" exitCode=0 Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.382062 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-l8kgp" 
event={"ID":"16809698-14ec-49bf-81cf-a4aab9e6dd62","Type":"ContainerDied","Data":"bf5b9b8a0d33ba8b010d22ef8019aba459d24d05edd4531489345c7d01bdeb5e"} Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.382086 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-l8kgp" event={"ID":"16809698-14ec-49bf-81cf-a4aab9e6dd62","Type":"ContainerStarted","Data":"1688761515da10324c84514f0db04475699f5b6cf98e50dd0914f7afcdb043ad"} Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.402710 4747 generic.go:334] "Generic (PLEG): container finished" podID="63d1c8ec-685f-46b7-859f-b32d265f3504" containerID="6c636c3ef4af986fda8151e8b6b39bf06fbd0f526e5e7e0262a3309590629e14" exitCode=0 Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.402929 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8xlw7" event={"ID":"63d1c8ec-685f-46b7-859f-b32d265f3504","Type":"ContainerDied","Data":"6c636c3ef4af986fda8151e8b6b39bf06fbd0f526e5e7e0262a3309590629e14"} Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.402968 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8xlw7" event={"ID":"63d1c8ec-685f-46b7-859f-b32d265f3504","Type":"ContainerStarted","Data":"088916f3ad992ebaa4f3aed14cdb138050a492a747da1ffdb595a9d751a1f213"} Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.409295 4747 generic.go:334] "Generic (PLEG): container finished" podID="6bf036a6-0cd6-4f08-a963-bb63f45d14a6" containerID="738814264b3583f0b3849cd96793b640f5ef7333533695943bf6c4a63fdac69a" exitCode=0 Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.409393 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nbl78" event={"ID":"6bf036a6-0cd6-4f08-a963-bb63f45d14a6","Type":"ContainerDied","Data":"738814264b3583f0b3849cd96793b640f5ef7333533695943bf6c4a63fdac69a"} Dec 02 17:02:47 crc kubenswrapper[4747]: I1202 17:02:47.409433 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nbl78" event={"ID":"6bf036a6-0cd6-4f08-a963-bb63f45d14a6","Type":"ContainerStarted","Data":"c1b5c4b9d30c2ad65832bc83472e35d792906e3fa84ddefa112498f6f7fa2178"} Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.139344 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nbl78" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.148039 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8xlw7" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.292454 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-l8kgp" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.309719 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhfch\" (UniqueName: \"kubernetes.io/projected/63d1c8ec-685f-46b7-859f-b32d265f3504-kube-api-access-rhfch\") pod \"63d1c8ec-685f-46b7-859f-b32d265f3504\" (UID: \"63d1c8ec-685f-46b7-859f-b32d265f3504\") " Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.309876 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r99c\" (UniqueName: \"kubernetes.io/projected/6bf036a6-0cd6-4f08-a963-bb63f45d14a6-kube-api-access-7r99c\") pod \"6bf036a6-0cd6-4f08-a963-bb63f45d14a6\" (UID: \"6bf036a6-0cd6-4f08-a963-bb63f45d14a6\") " Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.318332 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63d1c8ec-685f-46b7-859f-b32d265f3504-kube-api-access-rhfch" (OuterVolumeSpecName: "kube-api-access-rhfch") pod "63d1c8ec-685f-46b7-859f-b32d265f3504" (UID: "63d1c8ec-685f-46b7-859f-b32d265f3504"). InnerVolumeSpecName "kube-api-access-rhfch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.327434 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf036a6-0cd6-4f08-a963-bb63f45d14a6-kube-api-access-7r99c" (OuterVolumeSpecName: "kube-api-access-7r99c") pod "6bf036a6-0cd6-4f08-a963-bb63f45d14a6" (UID: "6bf036a6-0cd6-4f08-a963-bb63f45d14a6"). InnerVolumeSpecName "kube-api-access-7r99c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.411046 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpkrw\" (UniqueName: \"kubernetes.io/projected/16809698-14ec-49bf-81cf-a4aab9e6dd62-kube-api-access-jpkrw\") pod \"16809698-14ec-49bf-81cf-a4aab9e6dd62\" (UID: \"16809698-14ec-49bf-81cf-a4aab9e6dd62\") " Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.411709 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhfch\" (UniqueName: \"kubernetes.io/projected/63d1c8ec-685f-46b7-859f-b32d265f3504-kube-api-access-rhfch\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.411736 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r99c\" (UniqueName: \"kubernetes.io/projected/6bf036a6-0cd6-4f08-a963-bb63f45d14a6-kube-api-access-7r99c\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.417194 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16809698-14ec-49bf-81cf-a4aab9e6dd62-kube-api-access-jpkrw" (OuterVolumeSpecName: "kube-api-access-jpkrw") pod "16809698-14ec-49bf-81cf-a4aab9e6dd62" (UID: "16809698-14ec-49bf-81cf-a4aab9e6dd62"). InnerVolumeSpecName "kube-api-access-jpkrw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.430303 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nbl78" event={"ID":"6bf036a6-0cd6-4f08-a963-bb63f45d14a6","Type":"ContainerDied","Data":"c1b5c4b9d30c2ad65832bc83472e35d792906e3fa84ddefa112498f6f7fa2178"} Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.430353 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1b5c4b9d30c2ad65832bc83472e35d792906e3fa84ddefa112498f6f7fa2178" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.430417 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nbl78" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.433829 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-l8kgp" event={"ID":"16809698-14ec-49bf-81cf-a4aab9e6dd62","Type":"ContainerDied","Data":"1688761515da10324c84514f0db04475699f5b6cf98e50dd0914f7afcdb043ad"} Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.433888 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1688761515da10324c84514f0db04475699f5b6cf98e50dd0914f7afcdb043ad" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.433972 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-l8kgp" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.441633 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8xlw7" event={"ID":"63d1c8ec-685f-46b7-859f-b32d265f3504","Type":"ContainerDied","Data":"088916f3ad992ebaa4f3aed14cdb138050a492a747da1ffdb595a9d751a1f213"} Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.441681 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="088916f3ad992ebaa4f3aed14cdb138050a492a747da1ffdb595a9d751a1f213" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.441731 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-8xlw7" Dec 02 17:02:49 crc kubenswrapper[4747]: I1202 17:02:49.513556 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpkrw\" (UniqueName: \"kubernetes.io/projected/16809698-14ec-49bf-81cf-a4aab9e6dd62-kube-api-access-jpkrw\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:52 crc kubenswrapper[4747]: I1202 17:02:52.200140 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:02:52 crc kubenswrapper[4747]: I1202 17:02:52.340581 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nlq7d"] Dec 02 17:02:52 crc kubenswrapper[4747]: I1202 17:02:52.354541 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-nlq7d" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerName="dnsmasq-dns" containerID="cri-o://4e7d8a7b4e9b616f06849b9ab388e5cb6b7db333062485c6359f48ce32182f6d" gracePeriod=10 Dec 02 17:02:53 crc kubenswrapper[4747]: I1202 17:02:53.490167 4747 generic.go:334] "Generic (PLEG): container finished" podID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerID="4e7d8a7b4e9b616f06849b9ab388e5cb6b7db333062485c6359f48ce32182f6d" exitCode=0 Dec 02 17:02:53 crc kubenswrapper[4747]: I1202 17:02:53.490253 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nlq7d" event={"ID":"4e94e53f-7429-4ad7-86b8-cee6a01b0493","Type":"ContainerDied","Data":"4e7d8a7b4e9b616f06849b9ab388e5cb6b7db333062485c6359f48ce32182f6d"} Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.428709 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-b45f-account-create-zsdqj"] Dec 02 17:02:55 crc kubenswrapper[4747]: E1202 17:02:55.429644 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf036a6-0cd6-4f08-a963-bb63f45d14a6" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.429681 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf036a6-0cd6-4f08-a963-bb63f45d14a6" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: E1202 17:02:55.429706 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63d1c8ec-685f-46b7-859f-b32d265f3504" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.429717 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="63d1c8ec-685f-46b7-859f-b32d265f3504" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: E1202 17:02:55.429753 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16809698-14ec-49bf-81cf-a4aab9e6dd62" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.429767 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="16809698-14ec-49bf-81cf-a4aab9e6dd62" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.430092 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="63d1c8ec-685f-46b7-859f-b32d265f3504" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.430120 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf036a6-0cd6-4f08-a963-bb63f45d14a6" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.430140 4747 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="16809698-14ec-49bf-81cf-a4aab9e6dd62" containerName="mariadb-database-create" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.431065 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-b45f-account-create-zsdqj" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.435045 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.441990 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-b45f-account-create-zsdqj"] Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.489034 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-nlq7d" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.534164 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czcpm\" (UniqueName: \"kubernetes.io/projected/34c0b183-b0c0-4ce9-b869-1e9c0f5406a7-kube-api-access-czcpm\") pod \"barbican-b45f-account-create-zsdqj\" (UID: \"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7\") " pod="openstack/barbican-b45f-account-create-zsdqj" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.615794 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-caab-account-create-fzvvr"] Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.617630 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-caab-account-create-fzvvr" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.620140 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.628009 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-caab-account-create-fzvvr"] Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.635465 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czcpm\" (UniqueName: \"kubernetes.io/projected/34c0b183-b0c0-4ce9-b869-1e9c0f5406a7-kube-api-access-czcpm\") pod \"barbican-b45f-account-create-zsdqj\" (UID: \"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7\") " pod="openstack/barbican-b45f-account-create-zsdqj" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.656804 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czcpm\" (UniqueName: \"kubernetes.io/projected/34c0b183-b0c0-4ce9-b869-1e9c0f5406a7-kube-api-access-czcpm\") pod \"barbican-b45f-account-create-zsdqj\" (UID: \"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7\") " pod="openstack/barbican-b45f-account-create-zsdqj" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.736571 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7kkp\" (UniqueName: \"kubernetes.io/projected/bd662fa1-bbd1-4df5-8209-a062930ce5a5-kube-api-access-k7kkp\") pod \"cinder-caab-account-create-fzvvr\" (UID: \"bd662fa1-bbd1-4df5-8209-a062930ce5a5\") " pod="openstack/cinder-caab-account-create-fzvvr" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.754964 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-b45f-account-create-zsdqj" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.863277 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7kkp\" (UniqueName: \"kubernetes.io/projected/bd662fa1-bbd1-4df5-8209-a062930ce5a5-kube-api-access-k7kkp\") pod \"cinder-caab-account-create-fzvvr\" (UID: \"bd662fa1-bbd1-4df5-8209-a062930ce5a5\") " pod="openstack/cinder-caab-account-create-fzvvr" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.875571 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-49fa-account-create-9bzgd"] Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.877087 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-49fa-account-create-9bzgd" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.884283 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.889712 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7kkp\" (UniqueName: \"kubernetes.io/projected/bd662fa1-bbd1-4df5-8209-a062930ce5a5-kube-api-access-k7kkp\") pod \"cinder-caab-account-create-fzvvr\" (UID: \"bd662fa1-bbd1-4df5-8209-a062930ce5a5\") " pod="openstack/cinder-caab-account-create-fzvvr" Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.899736 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-49fa-account-create-9bzgd"] Dec 02 17:02:55 crc kubenswrapper[4747]: I1202 17:02:55.934857 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-caab-account-create-fzvvr" Dec 02 17:02:56 crc kubenswrapper[4747]: I1202 17:02:56.073001 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cshjr\" (UniqueName: \"kubernetes.io/projected/b29989e1-d454-4535-b5b2-cae16c355c4a-kube-api-access-cshjr\") pod \"neutron-49fa-account-create-9bzgd\" (UID: \"b29989e1-d454-4535-b5b2-cae16c355c4a\") " pod="openstack/neutron-49fa-account-create-9bzgd" Dec 02 17:02:56 crc kubenswrapper[4747]: I1202 17:02:56.174655 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cshjr\" (UniqueName: \"kubernetes.io/projected/b29989e1-d454-4535-b5b2-cae16c355c4a-kube-api-access-cshjr\") pod \"neutron-49fa-account-create-9bzgd\" (UID: \"b29989e1-d454-4535-b5b2-cae16c355c4a\") " pod="openstack/neutron-49fa-account-create-9bzgd" Dec 02 17:02:56 crc kubenswrapper[4747]: I1202 17:02:56.201348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cshjr\" (UniqueName: \"kubernetes.io/projected/b29989e1-d454-4535-b5b2-cae16c355c4a-kube-api-access-cshjr\") pod \"neutron-49fa-account-create-9bzgd\" (UID: \"b29989e1-d454-4535-b5b2-cae16c355c4a\") " pod="openstack/neutron-49fa-account-create-9bzgd" Dec 02 17:02:56 crc kubenswrapper[4747]: I1202 17:02:56.233005 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-49fa-account-create-9bzgd" Dec 02 17:02:58 crc kubenswrapper[4747]: I1202 17:02:58.541225 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fpmlm" event={"ID":"58d94d7e-5759-47cf-9920-db9d5fa862b3","Type":"ContainerStarted","Data":"e3473243b1b146a48828280d697b5c3ee79c1b98feaa0f4a2fd88985ce7816c5"} Dec 02 17:02:58 crc kubenswrapper[4747]: I1202 17:02:58.575698 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-fpmlm" podStartSLOduration=2.785874599 podStartE2EDuration="13.575675329s" podCreationTimestamp="2025-12-02 17:02:45 +0000 UTC" firstStartedPulling="2025-12-02 17:02:46.782373672 +0000 UTC m=+1197.309262421" lastFinishedPulling="2025-12-02 17:02:57.572174412 +0000 UTC m=+1208.099063151" observedRunningTime="2025-12-02 17:02:58.565240393 +0000 UTC m=+1209.092129152" watchObservedRunningTime="2025-12-02 17:02:58.575675329 +0000 UTC m=+1209.102564078" Dec 02 17:02:58 crc kubenswrapper[4747]: I1202 17:02:58.924530 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-49fa-account-create-9bzgd"] Dec 02 17:02:58 crc kubenswrapper[4747]: W1202 17:02:58.930965 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb29989e1_d454_4535_b5b2_cae16c355c4a.slice/crio-519859a49f21172c2440511753248a15328c38a1724fc50f0d1cc752b1aa09aa WatchSource:0}: Error finding container 519859a49f21172c2440511753248a15328c38a1724fc50f0d1cc752b1aa09aa: Status 404 returned error can't find the container with id 519859a49f21172c2440511753248a15328c38a1724fc50f0d1cc752b1aa09aa Dec 02 17:02:58 crc kubenswrapper[4747]: I1202 17:02:58.999661 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-caab-account-create-fzvvr"] Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.008579 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-b45f-account-create-zsdqj"] Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.030197 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nlq7d" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.155003 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-sb\") pod \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.155050 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-nb\") pod \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.155779 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-config\") pod \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.155816 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs7p8\" (UniqueName: \"kubernetes.io/projected/4e94e53f-7429-4ad7-86b8-cee6a01b0493-kube-api-access-bs7p8\") pod \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.155857 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-dns-svc\") pod \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\" (UID: \"4e94e53f-7429-4ad7-86b8-cee6a01b0493\") " Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.167812 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e94e53f-7429-4ad7-86b8-cee6a01b0493-kube-api-access-bs7p8" (OuterVolumeSpecName: "kube-api-access-bs7p8") pod "4e94e53f-7429-4ad7-86b8-cee6a01b0493" (UID: "4e94e53f-7429-4ad7-86b8-cee6a01b0493"). InnerVolumeSpecName "kube-api-access-bs7p8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.241645 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4e94e53f-7429-4ad7-86b8-cee6a01b0493" (UID: "4e94e53f-7429-4ad7-86b8-cee6a01b0493"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.242044 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4e94e53f-7429-4ad7-86b8-cee6a01b0493" (UID: "4e94e53f-7429-4ad7-86b8-cee6a01b0493"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.247729 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4e94e53f-7429-4ad7-86b8-cee6a01b0493" (UID: "4e94e53f-7429-4ad7-86b8-cee6a01b0493"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.254244 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-config" (OuterVolumeSpecName: "config") pod "4e94e53f-7429-4ad7-86b8-cee6a01b0493" (UID: "4e94e53f-7429-4ad7-86b8-cee6a01b0493"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.258064 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.258098 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.258116 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.258131 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs7p8\" (UniqueName: \"kubernetes.io/projected/4e94e53f-7429-4ad7-86b8-cee6a01b0493-kube-api-access-bs7p8\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.258145 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4e94e53f-7429-4ad7-86b8-cee6a01b0493-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.554333 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-nlq7d" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.554361 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-nlq7d" event={"ID":"4e94e53f-7429-4ad7-86b8-cee6a01b0493","Type":"ContainerDied","Data":"909cdccc449ebc05d7873060b80ff7aaeef93cc7288cac16f966bd4d44fac494"} Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.554827 4747 scope.go:117] "RemoveContainer" containerID="4e7d8a7b4e9b616f06849b9ab388e5cb6b7db333062485c6359f48ce32182f6d" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.556382 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-caab-account-create-fzvvr" event={"ID":"bd662fa1-bbd1-4df5-8209-a062930ce5a5","Type":"ContainerStarted","Data":"f3cc78ea769ccab77ba67659ff16524b4a7d422ce28605ca4bae0f6114becec2"} Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.556410 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-caab-account-create-fzvvr" event={"ID":"bd662fa1-bbd1-4df5-8209-a062930ce5a5","Type":"ContainerStarted","Data":"e594957e592e8e0c6725da0aa4f42a7ebe4c32cbbae90020fdae3dd3a10693fb"} Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.560166 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-b45f-account-create-zsdqj" event={"ID":"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7","Type":"ContainerStarted","Data":"080e1fe456b3764e74df995bcbc9f90df6e09d02c8d5c28a3c81192046d34935"} Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.560202 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-b45f-account-create-zsdqj" event={"ID":"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7","Type":"ContainerStarted","Data":"535fdf36f6fb4fd40c5815bbe498be8d314f0ac582964323f742b243026930f7"} Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.564423 4747 generic.go:334] "Generic (PLEG): container finished" podID="b29989e1-d454-4535-b5b2-cae16c355c4a" containerID="d31bfbc4f2a3336dc57eeb4a557e764dd8f16fd2cc74aab31ffbff6d17c9e25d" exitCode=0 Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.564488 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-49fa-account-create-9bzgd" event={"ID":"b29989e1-d454-4535-b5b2-cae16c355c4a","Type":"ContainerDied","Data":"d31bfbc4f2a3336dc57eeb4a557e764dd8f16fd2cc74aab31ffbff6d17c9e25d"} Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.564519 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-49fa-account-create-9bzgd" event={"ID":"b29989e1-d454-4535-b5b2-cae16c355c4a","Type":"ContainerStarted","Data":"519859a49f21172c2440511753248a15328c38a1724fc50f0d1cc752b1aa09aa"} Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.613340 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-caab-account-create-fzvvr" podStartSLOduration=4.613317375 podStartE2EDuration="4.613317375s" podCreationTimestamp="2025-12-02 17:02:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:02:59.60643928 +0000 UTC m=+1210.133328039" watchObservedRunningTime="2025-12-02 17:02:59.613317375 +0000 UTC m=+1210.140206124" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.631596 4747 scope.go:117] "RemoveContainer" containerID="06146c17b1d3189609edf70f1249e65cd9d38ba89555cb13114ce7c3cd9f8d94" Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.696157 4747 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nlq7d"] Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.706084 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-nlq7d"] Dec 02 17:02:59 crc kubenswrapper[4747]: I1202 17:02:59.775774 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" path="/var/lib/kubelet/pods/4e94e53f-7429-4ad7-86b8-cee6a01b0493/volumes" Dec 02 17:03:00 crc kubenswrapper[4747]: I1202 17:03:00.575554 4747 generic.go:334] "Generic (PLEG): container finished" podID="bd662fa1-bbd1-4df5-8209-a062930ce5a5" containerID="f3cc78ea769ccab77ba67659ff16524b4a7d422ce28605ca4bae0f6114becec2" exitCode=0 Dec 02 17:03:00 crc kubenswrapper[4747]: I1202 17:03:00.575660 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-caab-account-create-fzvvr" event={"ID":"bd662fa1-bbd1-4df5-8209-a062930ce5a5","Type":"ContainerDied","Data":"f3cc78ea769ccab77ba67659ff16524b4a7d422ce28605ca4bae0f6114becec2"} Dec 02 17:03:00 crc kubenswrapper[4747]: I1202 17:03:00.577218 4747 generic.go:334] "Generic (PLEG): container finished" podID="34c0b183-b0c0-4ce9-b869-1e9c0f5406a7" containerID="080e1fe456b3764e74df995bcbc9f90df6e09d02c8d5c28a3c81192046d34935" exitCode=0 Dec 02 17:03:00 crc kubenswrapper[4747]: I1202 17:03:00.577276 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-b45f-account-create-zsdqj" event={"ID":"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7","Type":"ContainerDied","Data":"080e1fe456b3764e74df995bcbc9f90df6e09d02c8d5c28a3c81192046d34935"} Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.018149 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-b45f-account-create-zsdqj" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.025659 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-49fa-account-create-9bzgd" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.145984 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czcpm\" (UniqueName: \"kubernetes.io/projected/34c0b183-b0c0-4ce9-b869-1e9c0f5406a7-kube-api-access-czcpm\") pod \"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7\" (UID: \"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7\") " Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.146192 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cshjr\" (UniqueName: \"kubernetes.io/projected/b29989e1-d454-4535-b5b2-cae16c355c4a-kube-api-access-cshjr\") pod \"b29989e1-d454-4535-b5b2-cae16c355c4a\" (UID: \"b29989e1-d454-4535-b5b2-cae16c355c4a\") " Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.152857 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34c0b183-b0c0-4ce9-b869-1e9c0f5406a7-kube-api-access-czcpm" (OuterVolumeSpecName: "kube-api-access-czcpm") pod "34c0b183-b0c0-4ce9-b869-1e9c0f5406a7" (UID: "34c0b183-b0c0-4ce9-b869-1e9c0f5406a7"). InnerVolumeSpecName "kube-api-access-czcpm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.153282 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b29989e1-d454-4535-b5b2-cae16c355c4a-kube-api-access-cshjr" (OuterVolumeSpecName: "kube-api-access-cshjr") pod "b29989e1-d454-4535-b5b2-cae16c355c4a" (UID: "b29989e1-d454-4535-b5b2-cae16c355c4a"). InnerVolumeSpecName "kube-api-access-cshjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.249109 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czcpm\" (UniqueName: \"kubernetes.io/projected/34c0b183-b0c0-4ce9-b869-1e9c0f5406a7-kube-api-access-czcpm\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.249154 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cshjr\" (UniqueName: \"kubernetes.io/projected/b29989e1-d454-4535-b5b2-cae16c355c4a-kube-api-access-cshjr\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.588156 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-b45f-account-create-zsdqj" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.588502 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-b45f-account-create-zsdqj" event={"ID":"34c0b183-b0c0-4ce9-b869-1e9c0f5406a7","Type":"ContainerDied","Data":"535fdf36f6fb4fd40c5815bbe498be8d314f0ac582964323f742b243026930f7"} Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.589466 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="535fdf36f6fb4fd40c5815bbe498be8d314f0ac582964323f742b243026930f7" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.589754 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-49fa-account-create-9bzgd" event={"ID":"b29989e1-d454-4535-b5b2-cae16c355c4a","Type":"ContainerDied","Data":"519859a49f21172c2440511753248a15328c38a1724fc50f0d1cc752b1aa09aa"} Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.589781 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="519859a49f21172c2440511753248a15328c38a1724fc50f0d1cc752b1aa09aa" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.590191 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-49fa-account-create-9bzgd" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.795479 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.795554 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.824629 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-caab-account-create-fzvvr" Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.960190 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7kkp\" (UniqueName: \"kubernetes.io/projected/bd662fa1-bbd1-4df5-8209-a062930ce5a5-kube-api-access-k7kkp\") pod \"bd662fa1-bbd1-4df5-8209-a062930ce5a5\" (UID: \"bd662fa1-bbd1-4df5-8209-a062930ce5a5\") " Dec 02 17:03:01 crc kubenswrapper[4747]: I1202 17:03:01.967204 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd662fa1-bbd1-4df5-8209-a062930ce5a5-kube-api-access-k7kkp" (OuterVolumeSpecName: "kube-api-access-k7kkp") pod "bd662fa1-bbd1-4df5-8209-a062930ce5a5" (UID: "bd662fa1-bbd1-4df5-8209-a062930ce5a5"). InnerVolumeSpecName "kube-api-access-k7kkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:02 crc kubenswrapper[4747]: I1202 17:03:02.063137 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7kkp\" (UniqueName: \"kubernetes.io/projected/bd662fa1-bbd1-4df5-8209-a062930ce5a5-kube-api-access-k7kkp\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:02 crc kubenswrapper[4747]: I1202 17:03:02.601102 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-caab-account-create-fzvvr" Dec 02 17:03:02 crc kubenswrapper[4747]: I1202 17:03:02.601105 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-caab-account-create-fzvvr" event={"ID":"bd662fa1-bbd1-4df5-8209-a062930ce5a5","Type":"ContainerDied","Data":"e594957e592e8e0c6725da0aa4f42a7ebe4c32cbbae90020fdae3dd3a10693fb"} Dec 02 17:03:02 crc kubenswrapper[4747]: I1202 17:03:02.601156 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e594957e592e8e0c6725da0aa4f42a7ebe4c32cbbae90020fdae3dd3a10693fb" Dec 02 17:03:02 crc kubenswrapper[4747]: I1202 17:03:02.603179 4747 generic.go:334] "Generic (PLEG): container finished" podID="58d94d7e-5759-47cf-9920-db9d5fa862b3" containerID="e3473243b1b146a48828280d697b5c3ee79c1b98feaa0f4a2fd88985ce7816c5" exitCode=0 Dec 02 17:03:02 crc kubenswrapper[4747]: I1202 17:03:02.603253 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fpmlm" event={"ID":"58d94d7e-5759-47cf-9920-db9d5fa862b3","Type":"ContainerDied","Data":"e3473243b1b146a48828280d697b5c3ee79c1b98feaa0f4a2fd88985ce7816c5"} Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.010261 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.206873 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-combined-ca-bundle\") pod \"58d94d7e-5759-47cf-9920-db9d5fa862b3\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.207015 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vkzl\" (UniqueName: \"kubernetes.io/projected/58d94d7e-5759-47cf-9920-db9d5fa862b3-kube-api-access-8vkzl\") pod \"58d94d7e-5759-47cf-9920-db9d5fa862b3\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.207214 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-config-data\") pod \"58d94d7e-5759-47cf-9920-db9d5fa862b3\" (UID: \"58d94d7e-5759-47cf-9920-db9d5fa862b3\") " Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.214989 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58d94d7e-5759-47cf-9920-db9d5fa862b3-kube-api-access-8vkzl" (OuterVolumeSpecName: "kube-api-access-8vkzl") pod "58d94d7e-5759-47cf-9920-db9d5fa862b3" (UID: "58d94d7e-5759-47cf-9920-db9d5fa862b3"). InnerVolumeSpecName "kube-api-access-8vkzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.230597 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58d94d7e-5759-47cf-9920-db9d5fa862b3" (UID: "58d94d7e-5759-47cf-9920-db9d5fa862b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.248152 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-config-data" (OuterVolumeSpecName: "config-data") pod "58d94d7e-5759-47cf-9920-db9d5fa862b3" (UID: "58d94d7e-5759-47cf-9920-db9d5fa862b3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.309396 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.309437 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vkzl\" (UniqueName: \"kubernetes.io/projected/58d94d7e-5759-47cf-9920-db9d5fa862b3-kube-api-access-8vkzl\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.309448 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58d94d7e-5759-47cf-9920-db9d5fa862b3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.625435 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fpmlm" event={"ID":"58d94d7e-5759-47cf-9920-db9d5fa862b3","Type":"ContainerDied","Data":"cabbbed3da865d21de9b123bfac26135ed223498a96854d55d39644315112699"} Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.625718 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fpmlm" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.625749 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cabbbed3da865d21de9b123bfac26135ed223498a96854d55d39644315112699" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.900851 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd"] Dec 02 17:03:04 crc kubenswrapper[4747]: E1202 17:03:04.901433 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b29989e1-d454-4535-b5b2-cae16c355c4a" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901465 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b29989e1-d454-4535-b5b2-cae16c355c4a" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: E1202 17:03:04.901480 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd662fa1-bbd1-4df5-8209-a062930ce5a5" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901489 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd662fa1-bbd1-4df5-8209-a062930ce5a5" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: E1202 17:03:04.901511 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerName="dnsmasq-dns" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901523 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerName="dnsmasq-dns" Dec 02 17:03:04 crc kubenswrapper[4747]: E1202 17:03:04.901546 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58d94d7e-5759-47cf-9920-db9d5fa862b3" containerName="keystone-db-sync" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901555 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="58d94d7e-5759-47cf-9920-db9d5fa862b3" containerName="keystone-db-sync" Dec 02 17:03:04 crc kubenswrapper[4747]: E1202 17:03:04.901570 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerName="init" Dec 02 
17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901578 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerName="init" Dec 02 17:03:04 crc kubenswrapper[4747]: E1202 17:03:04.901597 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34c0b183-b0c0-4ce9-b869-1e9c0f5406a7" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901605 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="34c0b183-b0c0-4ce9-b869-1e9c0f5406a7" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901825 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="58d94d7e-5759-47cf-9920-db9d5fa862b3" containerName="keystone-db-sync" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901845 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd662fa1-bbd1-4df5-8209-a062930ce5a5" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901860 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b29989e1-d454-4535-b5b2-cae16c355c4a" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.901895 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e94e53f-7429-4ad7-86b8-cee6a01b0493" containerName="dnsmasq-dns" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.902777 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="34c0b183-b0c0-4ce9-b869-1e9c0f5406a7" containerName="mariadb-account-create" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.904060 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.922884 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.923006 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-config\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.923032 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.923150 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.923175 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.923201 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zngp\" (UniqueName: \"kubernetes.io/projected/67f47fd7-349d-49f7-8b32-eaedbe60cf69-kube-api-access-8zngp\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.925865 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd"] Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.938709 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-q7gd9"] Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.950016 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.956189 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zr9j" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.956418 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.956551 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.956652 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 17:03:04 crc kubenswrapper[4747]: I1202 17:03:04.973802 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q7gd9"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025274 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tfkl\" (UniqueName: \"kubernetes.io/projected/dc039d89-567b-4837-8b5b-a5903c7a437c-kube-api-access-8tfkl\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025352 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-config-data\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025393 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-config\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025417 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: 
\"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025447 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-fernet-keys\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025546 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025577 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zngp\" (UniqueName: \"kubernetes.io/projected/67f47fd7-349d-49f7-8b32-eaedbe60cf69-kube-api-access-8zngp\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025602 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-credential-keys\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025631 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-combined-ca-bundle\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-scripts\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.025686 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.027068 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: 
\"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.027735 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.027894 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-config\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.028537 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.029488 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.066141 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zngp\" (UniqueName: \"kubernetes.io/projected/67f47fd7-349d-49f7-8b32-eaedbe60cf69-kube-api-access-8zngp\") pod \"dnsmasq-dns-5c5cc7c5ff-wwmgd\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.116684 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-568d9bc79c-hb4wc"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.118684 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.124039 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-t6fsr" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.124438 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.124615 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.124803 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.128963 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-fernet-keys\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.129057 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-credential-keys\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.129092 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-combined-ca-bundle\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.129116 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-scripts\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.129177 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tfkl\" (UniqueName: \"kubernetes.io/projected/dc039d89-567b-4837-8b5b-a5903c7a437c-kube-api-access-8tfkl\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.129228 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-config-data\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.135523 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-568d9bc79c-hb4wc"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.141163 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-config-data\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc 
kubenswrapper[4747]: I1202 17:03:05.149160 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-fernet-keys\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.149462 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-scripts\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.151394 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-combined-ca-bundle\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.180591 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-credential-keys\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.181489 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tfkl\" (UniqueName: \"kubernetes.io/projected/dc039d89-567b-4837-8b5b-a5903c7a437c-kube-api-access-8tfkl\") pod \"keystone-bootstrap-q7gd9\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") " pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.229746 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.233991 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.235001 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-scripts\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.235179 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c499186d-ded2-4595-a041-07143f5676cd-horizon-secret-key\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.235344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c499186d-ded2-4595-a041-07143f5676cd-logs\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.235453 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52df8\" (UniqueName: \"kubernetes.io/projected/c499186d-ded2-4595-a041-07143f5676cd-kube-api-access-52df8\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.235549 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-config-data\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.236390 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.240472 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.245302 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.282399 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.287380 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342350 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-run-httpd\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342396 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-config-data\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342429 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-scripts\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342450 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c499186d-ded2-4595-a041-07143f5676cd-horizon-secret-key\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342512 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c499186d-ded2-4595-a041-07143f5676cd-logs\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342533 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvs2g\" (UniqueName: \"kubernetes.io/projected/210762bb-c409-4ed0-af37-4e70f2c7c955-kube-api-access-kvs2g\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342555 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52df8\" (UniqueName: \"kubernetes.io/projected/c499186d-ded2-4595-a041-07143f5676cd-kube-api-access-52df8\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342570 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342599 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-log-httpd\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342625 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-config-data\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.342675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-scripts\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.343845 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-scripts\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.344148 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c499186d-ded2-4595-a041-07143f5676cd-logs\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.345066 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-config-data\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.356396 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-d5cdc885-fzw6d"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.358690 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.360385 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c499186d-ded2-4595-a041-07143f5676cd-horizon-secret-key\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.374038 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5cdc885-fzw6d"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.390384 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-sclpf"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.397495 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.402995 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.403325 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p5l74" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.403499 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.404865 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52df8\" (UniqueName: \"kubernetes.io/projected/c499186d-ded2-4595-a041-07143f5676cd-kube-api-access-52df8\") pod \"horizon-568d9bc79c-hb4wc\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.434716 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sclpf"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.448029 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.448088 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-log-httpd\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.448150 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.448167 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-scripts\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.448200 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-run-httpd\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.448213 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-config-data\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.448293 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvs2g\" (UniqueName: \"kubernetes.io/projected/210762bb-c409-4ed0-af37-4e70f2c7c955-kube-api-access-kvs2g\") pod \"ceilometer-0\" (UID: 
\"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.451223 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-run-httpd\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.453404 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-log-httpd\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.463182 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.463339 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-scripts\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.464948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-config-data\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.478635 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.498730 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvs2g\" (UniqueName: \"kubernetes.io/projected/210762bb-c409-4ed0-af37-4e70f2c7c955-kube-api-access-kvs2g\") pod \"ceilometer-0\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") " pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.530735 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.539641 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.583570 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-logs\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.585371 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-scripts\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586086 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6131d8c6-b2aa-44b9-afe5-617eccd809bc-horizon-secret-key\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586177 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-combined-ca-bundle\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586292 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-config-data\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586320 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsxp7\" (UniqueName: \"kubernetes.io/projected/6131d8c6-b2aa-44b9-afe5-617eccd809bc-kube-api-access-zsxp7\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586348 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-config-data\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586431 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-scripts\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586456 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6131d8c6-b2aa-44b9-afe5-617eccd809bc-logs\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " 
pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.586513 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmm5g\" (UniqueName: \"kubernetes.io/projected/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-kube-api-access-kmm5g\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.633966 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.646480 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.650216 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.652784 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.653718 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-5ddkh" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.654648 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.654771 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.681995 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-smmcv"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689055 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689112 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-479wq\" (UniqueName: \"kubernetes.io/projected/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-kube-api-access-479wq\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-logs\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689208 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-scripts\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689234 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/6131d8c6-b2aa-44b9-afe5-617eccd809bc-horizon-secret-key\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689256 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689282 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689302 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-combined-ca-bundle\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.689340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691490 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-config-data\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsxp7\" (UniqueName: \"kubernetes.io/projected/6131d8c6-b2aa-44b9-afe5-617eccd809bc-kube-api-access-zsxp7\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691549 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-config-data\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691567 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691583 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691643 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-logs\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691677 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-scripts\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691709 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6131d8c6-b2aa-44b9-afe5-617eccd809bc-logs\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.691770 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmm5g\" (UniqueName: \"kubernetes.io/projected/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-kube-api-access-kmm5g\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.693434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-logs\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.694527 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-config-data\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.694723 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-scripts\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.695223 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6131d8c6-b2aa-44b9-afe5-617eccd809bc-logs\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.698696 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.700768 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-combined-ca-bundle\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.705347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-scripts\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.707041 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-config-data\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.713772 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6131d8c6-b2aa-44b9-afe5-617eccd809bc-horizon-secret-key\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.715407 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmm5g\" (UniqueName: \"kubernetes.io/projected/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-kube-api-access-kmm5g\") pod \"placement-db-sync-sclpf\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.724556 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsxp7\" (UniqueName: \"kubernetes.io/projected/6131d8c6-b2aa-44b9-afe5-617eccd809bc-kube-api-access-zsxp7\") pod \"horizon-d5cdc885-fzw6d\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") " pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.737026 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.740403 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.743594 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-smmcv"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.749691 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.751639 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.755572 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.755988 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.779196 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.793936 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794002 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794025 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794046 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794071 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794089 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794242 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-logs\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794353 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794405 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794446 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794475 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q5j7\" (UniqueName: \"kubernetes.io/projected/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-kube-api-access-6q5j7\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794528 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794577 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794593 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8vct\" (UniqueName: \"kubernetes.io/projected/9bafda27-9255-4b5a-921f-69324343b9e0-kube-api-access-q8vct\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794812 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-logs\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794864 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794971 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.794995 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-479wq\" (UniqueName: \"kubernetes.io/projected/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-kube-api-access-479wq\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.795088 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-config\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.795111 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.795130 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-logs\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.795164 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.795181 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.795204 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.795271 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.805858 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-config-data\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.810597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-479wq\" (UniqueName: \"kubernetes.io/projected/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-kube-api-access-479wq\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.830915 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896720 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896768 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896814 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896836 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896869 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896926 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896946 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-nb\") pod 
\"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896966 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.896991 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q5j7\" (UniqueName: \"kubernetes.io/projected/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-kube-api-access-6q5j7\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.897013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.897040 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8vct\" (UniqueName: \"kubernetes.io/projected/9bafda27-9255-4b5a-921f-69324343b9e0-kube-api-access-q8vct\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.897066 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.897105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-config\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.897130 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-logs\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.897836 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-logs\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.898738 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc 
kubenswrapper[4747]: I1202 17:03:05.908109 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.918127 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.918942 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.919642 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.920340 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.922669 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-config\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.924994 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-6f4g4"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.925320 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.926480 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.929822 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.930044 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qjkbj" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.933316 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.934048 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.940637 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8vct\" (UniqueName: \"kubernetes.io/projected/9bafda27-9255-4b5a-921f-69324343b9e0-kube-api-access-q8vct\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.943670 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q5j7\" (UniqueName: \"kubernetes.io/projected/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-kube-api-access-6q5j7\") pod \"dnsmasq-dns-8b5c85b87-smmcv\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.944376 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6f4g4"] Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.948577 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.979545 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.989437 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.998611 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bfs7\" (UniqueName: \"kubernetes.io/projected/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-kube-api-access-5bfs7\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.998667 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-db-sync-config-data\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:05 crc kubenswrapper[4747]: I1202 17:03:05.998784 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-combined-ca-bundle\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.023301 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.023815 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-scripts\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.026275 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.041073 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.100449 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bfs7\" (UniqueName: \"kubernetes.io/projected/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-kube-api-access-5bfs7\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.100554 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-db-sync-config-data\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.100738 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-combined-ca-bundle\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.114850 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-combined-ca-bundle\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.125518 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-db-sync-config-data\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.167243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bfs7\" (UniqueName: \"kubernetes.io/projected/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-kube-api-access-5bfs7\") pod \"barbican-db-sync-6f4g4\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.167692 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-bgbtw"] Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.169103 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.185993 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.187250 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jzp2t" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.187381 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.245183 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd"] Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.276093 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bgbtw"] Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.281613 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.309078 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.325990 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-scripts\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.326121 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-db-sync-config-data\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.326155 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-combined-ca-bundle\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.326238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-etc-machine-id\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.326301 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d62m\" (UniqueName: \"kubernetes.io/projected/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-kube-api-access-6d62m\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.326367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-config-data\") pod \"cinder-db-sync-bgbtw\" (UID: 
\"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.334614 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.378093 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q7gd9"] Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.388065 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-q5btv"] Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.393965 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.395048 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-q5btv"] Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.400792 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.401030 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.401878 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jfm9d" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.427885 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-scripts\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.427998 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-db-sync-config-data\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.428032 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-combined-ca-bundle\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.428154 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-etc-machine-id\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.428268 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d62m\" (UniqueName: \"kubernetes.io/projected/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-kube-api-access-6d62m\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.428383 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-config-data\") pod 
\"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.429067 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-etc-machine-id\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.435848 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-combined-ca-bundle\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.438229 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-scripts\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.438619 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-db-sync-config-data\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.439154 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-config-data\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.451554 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d62m\" (UniqueName: \"kubernetes.io/projected/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-kube-api-access-6d62m\") pod \"cinder-db-sync-bgbtw\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.530280 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-config\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.530473 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dps92\" (UniqueName: \"kubernetes.io/projected/d05e3f55-d7e5-417a-9344-e24038aaa516-kube-api-access-dps92\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.530513 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-combined-ca-bundle\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.636690 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-config\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.636802 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dps92\" (UniqueName: \"kubernetes.io/projected/d05e3f55-d7e5-417a-9344-e24038aaa516-kube-api-access-dps92\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.636852 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-combined-ca-bundle\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.643767 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-config\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.645327 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-combined-ca-bundle\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.662243 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.687282 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" event={"ID":"67f47fd7-349d-49f7-8b32-eaedbe60cf69","Type":"ContainerStarted","Data":"1bee4f00f2183130d6c9eae80cdb9e2234883e62d54fbc78e218d1b32f7df803"} Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.697999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q7gd9" event={"ID":"dc039d89-567b-4837-8b5b-a5903c7a437c","Type":"ContainerStarted","Data":"10fa3f0412fe3292f13bed2158b477f96317edec554ad0ab83d37d8db952efb1"} Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.722225 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dps92\" (UniqueName: \"kubernetes.io/projected/d05e3f55-d7e5-417a-9344-e24038aaa516-kube-api-access-dps92\") pod \"neutron-db-sync-q5btv\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:06 crc kubenswrapper[4747]: I1202 17:03:06.763583 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-568d9bc79c-hb4wc"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.018020 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.032785 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.421067 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-smmcv"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.441194 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-sclpf"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.463763 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5cdc885-fzw6d"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.622438 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6f4g4"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.631244 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-bgbtw"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.695105 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-q5btv"] Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.736249 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerStarted","Data":"5e755634c093ed580c885ba8f2134a4fc62444ab047cdcc74f093478e332fe66"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.751635 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q7gd9" event={"ID":"dc039d89-567b-4837-8b5b-a5903c7a437c","Type":"ContainerStarted","Data":"626edf5c8b730915ff5dfe7a71e0ba211533522640a80e114136d58d0e81b81c"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.759634 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-568d9bc79c-hb4wc" event={"ID":"c499186d-ded2-4595-a041-07143f5676cd","Type":"ContainerStarted","Data":"426877c942349c1a9f3260de3128e3b7197f3c44e548f766bc8ec0401117752b"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.789207 4747 generic.go:334] "Generic (PLEG): container finished" podID="67f47fd7-349d-49f7-8b32-eaedbe60cf69" containerID="f1d635e96ac6b47c42eb5c5135681ce8b6a97b5b4650513b5b402e88284b8af0" exitCode=0 Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.812113 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5cdc885-fzw6d" event={"ID":"6131d8c6-b2aa-44b9-afe5-617eccd809bc","Type":"ContainerStarted","Data":"92f3dfbbc473420b77cb9087c55544e39707476a8eac37bc02bd11f65d195ade"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.812174 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6f4g4" event={"ID":"b3d0edb2-cca7-4f61-bf72-c3fddb909fea","Type":"ContainerStarted","Data":"e49fb550ebf3a439175c6a6ca3615bbd087756f3c2871cbb46a640fe9e1ec76a"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.818863 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sclpf" event={"ID":"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8","Type":"ContainerStarted","Data":"32835bfc0686335264d699db43f5f62e44e73b1ccc30415561ff23aa8f9f4551"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.818916 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" 
event={"ID":"67f47fd7-349d-49f7-8b32-eaedbe60cf69","Type":"ContainerDied","Data":"f1d635e96ac6b47c42eb5c5135681ce8b6a97b5b4650513b5b402e88284b8af0"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.818937 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" event={"ID":"34b06f22-5fb1-465e-b20f-27a0cfb5bb35","Type":"ContainerStarted","Data":"d4613a0f943250ad0b29ef507e18fbfd04bd79a64ad55821d0f84e59d8185c27"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.819243 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bgbtw" event={"ID":"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2","Type":"ContainerStarted","Data":"f1ac92c63c80f3857465dea8d6aa661fa7e6dd542affc35e420320dc47a08b96"} Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.827314 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-q7gd9" podStartSLOduration=3.827285293 podStartE2EDuration="3.827285293s" podCreationTimestamp="2025-12-02 17:03:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:07.788587654 +0000 UTC m=+1218.315476423" watchObservedRunningTime="2025-12-02 17:03:07.827285293 +0000 UTC m=+1218.354174042" Dec 02 17:03:07 crc kubenswrapper[4747]: I1202 17:03:07.828357 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:07 crc kubenswrapper[4747]: W1202 17:03:07.847002 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bafda27_9255_4b5a_921f_69324343b9e0.slice/crio-81739c64482ae1ad0c6d8cd5871f7628f9fdc04588119f354167138a10dc97b2 WatchSource:0}: Error finding container 81739c64482ae1ad0c6d8cd5871f7628f9fdc04588119f354167138a10dc97b2: Status 404 returned error can't find the container with id 81739c64482ae1ad0c6d8cd5871f7628f9fdc04588119f354167138a10dc97b2 Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.203948 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.233305 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-568d9bc79c-hb4wc"] Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.303269 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.323972 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-cd4679699-v9rpl"] Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.327242 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.352108 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-cd4679699-v9rpl"] Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.385749 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.413178 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-logs\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.413240 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-config-data\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.413291 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-horizon-secret-key\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.413427 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7ctv\" (UniqueName: \"kubernetes.io/projected/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-kube-api-access-p7ctv\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.413452 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-scripts\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.468121 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.515202 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-horizon-secret-key\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.515336 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7ctv\" (UniqueName: \"kubernetes.io/projected/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-kube-api-access-p7ctv\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.515363 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-scripts\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.515438 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-logs\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.515468 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-config-data\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.519242 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-config-data\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.519576 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-logs\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.526935 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-scripts\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.559763 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7ctv\" (UniqueName: \"kubernetes.io/projected/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-kube-api-access-p7ctv\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.578090 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-horizon-secret-key\") pod \"horizon-cd4679699-v9rpl\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.603779 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.616477 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-nb\") pod \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.616573 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-swift-storage-0\") pod \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.616703 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zngp\" (UniqueName: \"kubernetes.io/projected/67f47fd7-349d-49f7-8b32-eaedbe60cf69-kube-api-access-8zngp\") pod \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.616814 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-svc\") pod \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.616987 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-sb\") pod \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.617016 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-config\") pod \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\" (UID: \"67f47fd7-349d-49f7-8b32-eaedbe60cf69\") " Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.628821 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67f47fd7-349d-49f7-8b32-eaedbe60cf69-kube-api-access-8zngp" (OuterVolumeSpecName: "kube-api-access-8zngp") pod "67f47fd7-349d-49f7-8b32-eaedbe60cf69" (UID: "67f47fd7-349d-49f7-8b32-eaedbe60cf69"). InnerVolumeSpecName "kube-api-access-8zngp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.661319 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "67f47fd7-349d-49f7-8b32-eaedbe60cf69" (UID: "67f47fd7-349d-49f7-8b32-eaedbe60cf69"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.661831 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "67f47fd7-349d-49f7-8b32-eaedbe60cf69" (UID: "67f47fd7-349d-49f7-8b32-eaedbe60cf69"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.669443 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "67f47fd7-349d-49f7-8b32-eaedbe60cf69" (UID: "67f47fd7-349d-49f7-8b32-eaedbe60cf69"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.676063 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "67f47fd7-349d-49f7-8b32-eaedbe60cf69" (UID: "67f47fd7-349d-49f7-8b32-eaedbe60cf69"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.683932 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-config" (OuterVolumeSpecName: "config") pod "67f47fd7-349d-49f7-8b32-eaedbe60cf69" (UID: "67f47fd7-349d-49f7-8b32-eaedbe60cf69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.719877 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zngp\" (UniqueName: \"kubernetes.io/projected/67f47fd7-349d-49f7-8b32-eaedbe60cf69-kube-api-access-8zngp\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.719950 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.719968 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.719981 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.719992 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.720002 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67f47fd7-349d-49f7-8b32-eaedbe60cf69-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.759434 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.836958 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9bafda27-9255-4b5a-921f-69324343b9e0","Type":"ContainerStarted","Data":"81739c64482ae1ad0c6d8cd5871f7628f9fdc04588119f354167138a10dc97b2"} Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.863865 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-q5btv" event={"ID":"d05e3f55-d7e5-417a-9344-e24038aaa516","Type":"ContainerStarted","Data":"9c22ee406fa8516908ec60f0cc4a3a8dacc8dcb202d0f8112b59ee1aa68bf9e0"} Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.863932 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-q5btv" event={"ID":"d05e3f55-d7e5-417a-9344-e24038aaa516","Type":"ContainerStarted","Data":"b692fed4be970e4fcd8728153820626292e488411311da4f761e8a918c5931dc"} Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.883121 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-q5btv" podStartSLOduration=2.883100535 podStartE2EDuration="2.883100535s" podCreationTimestamp="2025-12-02 17:03:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:08.881954003 +0000 UTC m=+1219.408842752" watchObservedRunningTime="2025-12-02 17:03:08.883100535 +0000 UTC m=+1219.409989284" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.890402 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" event={"ID":"67f47fd7-349d-49f7-8b32-eaedbe60cf69","Type":"ContainerDied","Data":"1bee4f00f2183130d6c9eae80cdb9e2234883e62d54fbc78e218d1b32f7df803"} Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.890482 4747 scope.go:117] "RemoveContainer" containerID="f1d635e96ac6b47c42eb5c5135681ce8b6a97b5b4650513b5b402e88284b8af0" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.890649 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd" Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.898744 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec","Type":"ContainerStarted","Data":"867c5028c1eccdea176ea31bbc9144e96de7f7bf974856a6451651fcdc4d2444"} Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.903293 4747 generic.go:334] "Generic (PLEG): container finished" podID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerID="f6e3f72a0b874effb25955ebc96e8be89c98dc92cdea4cae52a625c63154b3c3" exitCode=0 Dec 02 17:03:08 crc kubenswrapper[4747]: I1202 17:03:08.905041 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" event={"ID":"34b06f22-5fb1-465e-b20f-27a0cfb5bb35","Type":"ContainerDied","Data":"f6e3f72a0b874effb25955ebc96e8be89c98dc92cdea4cae52a625c63154b3c3"} Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.018049 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd"] Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.047321 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-wwmgd"] Dec 02 17:03:09 crc kubenswrapper[4747]: E1202 17:03:09.195160 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67f47fd7_349d_49f7_8b32_eaedbe60cf69.slice/crio-1bee4f00f2183130d6c9eae80cdb9e2234883e62d54fbc78e218d1b32f7df803\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67f47fd7_349d_49f7_8b32_eaedbe60cf69.slice\": RecentStats: unable to find data in memory cache]" Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.520847 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-cd4679699-v9rpl"] Dec 02 17:03:09 crc kubenswrapper[4747]: W1202 17:03:09.561529 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4e4fe63_a9e0_4903_be72_bc14a8d3113b.slice/crio-0f3680ff0b5d7fbf9abe63d148ab18302f50b44f1bad4b23b0efe04e26933061 WatchSource:0}: Error finding container 0f3680ff0b5d7fbf9abe63d148ab18302f50b44f1bad4b23b0efe04e26933061: Status 404 returned error can't find the container with id 0f3680ff0b5d7fbf9abe63d148ab18302f50b44f1bad4b23b0efe04e26933061 Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.795359 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67f47fd7-349d-49f7-8b32-eaedbe60cf69" path="/var/lib/kubelet/pods/67f47fd7-349d-49f7-8b32-eaedbe60cf69/volumes" Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.939363 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" event={"ID":"34b06f22-5fb1-465e-b20f-27a0cfb5bb35","Type":"ContainerStarted","Data":"3f2032b8822d8aa7ab6105e46aa7a4ff3d59c810620a7fbe678fe1e7252aa2f9"} Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.949164 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.951439 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-cd4679699-v9rpl" 
event={"ID":"a4e4fe63-a9e0-4903-be72-bc14a8d3113b","Type":"ContainerStarted","Data":"0f3680ff0b5d7fbf9abe63d148ab18302f50b44f1bad4b23b0efe04e26933061"} Dec 02 17:03:09 crc kubenswrapper[4747]: I1202 17:03:09.961942 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9bafda27-9255-4b5a-921f-69324343b9e0","Type":"ContainerStarted","Data":"9331fccf82f9f276b315a0c5e5a474f3c668cd82d1e40abf64db5664f4439c0e"} Dec 02 17:03:10 crc kubenswrapper[4747]: I1202 17:03:10.082715 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" podStartSLOduration=5.082688428 podStartE2EDuration="5.082688428s" podCreationTimestamp="2025-12-02 17:03:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:10.081110224 +0000 UTC m=+1220.607998973" watchObservedRunningTime="2025-12-02 17:03:10.082688428 +0000 UTC m=+1220.609577177" Dec 02 17:03:10 crc kubenswrapper[4747]: I1202 17:03:10.985589 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec","Type":"ContainerStarted","Data":"4eb991c0cd52d31fbe2cf1164339d5c04498127b5366f83075522e9d2d3120ad"} Dec 02 17:03:12 crc kubenswrapper[4747]: I1202 17:03:12.004212 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9bafda27-9255-4b5a-921f-69324343b9e0","Type":"ContainerStarted","Data":"b4b794ba5ef57686542a7628886e3a73e92dd9ba7de9988196484760891dfd9c"} Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.025299 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec","Type":"ContainerStarted","Data":"f803f943eb3ed7f15daf5407d94e6199c451b9d2392a838356dbd3be677b2301"} Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.025366 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-log" containerID="cri-o://9331fccf82f9f276b315a0c5e5a474f3c668cd82d1e40abf64db5664f4439c0e" gracePeriod=30 Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.028055 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-httpd" containerID="cri-o://b4b794ba5ef57686542a7628886e3a73e92dd9ba7de9988196484760891dfd9c" gracePeriod=30 Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.025401 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-log" containerID="cri-o://4eb991c0cd52d31fbe2cf1164339d5c04498127b5366f83075522e9d2d3120ad" gracePeriod=30 Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.025471 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-httpd" containerID="cri-o://f803f943eb3ed7f15daf5407d94e6199c451b9d2392a838356dbd3be677b2301" gracePeriod=30 Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.056712 4747 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.056687955 podStartE2EDuration="8.056687955s" podCreationTimestamp="2025-12-02 17:03:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:13.047172775 +0000 UTC m=+1223.574061544" watchObservedRunningTime="2025-12-02 17:03:13.056687955 +0000 UTC m=+1223.583576704" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.076589 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.076559819 podStartE2EDuration="8.076559819s" podCreationTimestamp="2025-12-02 17:03:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:13.074056588 +0000 UTC m=+1223.600945337" watchObservedRunningTime="2025-12-02 17:03:13.076559819 +0000 UTC m=+1223.603448568" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.939819 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d5cdc885-fzw6d"] Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.965202 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5cc747446d-fcnwd"] Dec 02 17:03:13 crc kubenswrapper[4747]: E1202 17:03:13.965675 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67f47fd7-349d-49f7-8b32-eaedbe60cf69" containerName="init" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.965696 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="67f47fd7-349d-49f7-8b32-eaedbe60cf69" containerName="init" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.965858 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="67f47fd7-349d-49f7-8b32-eaedbe60cf69" containerName="init" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.968295 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.974029 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.983201 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cc747446d-fcnwd"] Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.990001 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-cd4679699-v9rpl"] Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.996143 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89kzm\" (UniqueName: \"kubernetes.io/projected/b26d33d5-1b96-470b-8677-cb5273c72d25-kube-api-access-89kzm\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.996235 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-tls-certs\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.996352 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-config-data\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.996388 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b26d33d5-1b96-470b-8677-cb5273c72d25-logs\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.996438 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-combined-ca-bundle\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.996463 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-scripts\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:13 crc kubenswrapper[4747]: I1202 17:03:13.996503 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-secret-key\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.008049 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7c85649748-scrqf"] Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.013139 
4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.058726 4747 generic.go:334] "Generic (PLEG): container finished" podID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerID="4eb991c0cd52d31fbe2cf1164339d5c04498127b5366f83075522e9d2d3120ad" exitCode=143 Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.059208 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec","Type":"ContainerDied","Data":"4eb991c0cd52d31fbe2cf1164339d5c04498127b5366f83075522e9d2d3120ad"} Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.063451 4747 generic.go:334] "Generic (PLEG): container finished" podID="9bafda27-9255-4b5a-921f-69324343b9e0" containerID="9331fccf82f9f276b315a0c5e5a474f3c668cd82d1e40abf64db5664f4439c0e" exitCode=143 Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.063511 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9bafda27-9255-4b5a-921f-69324343b9e0","Type":"ContainerDied","Data":"9331fccf82f9f276b315a0c5e5a474f3c668cd82d1e40abf64db5664f4439c0e"} Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.065278 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c85649748-scrqf"] Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.100032 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89kzm\" (UniqueName: \"kubernetes.io/projected/b26d33d5-1b96-470b-8677-cb5273c72d25-kube-api-access-89kzm\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.100117 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-tls-certs\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.100199 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-config-data\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.100239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b26d33d5-1b96-470b-8677-cb5273c72d25-logs\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.100280 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-combined-ca-bundle\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.100304 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-scripts\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.100332 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-secret-key\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.103457 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-scripts\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.103831 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b26d33d5-1b96-470b-8677-cb5273c72d25-logs\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.104702 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-config-data\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.109702 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-combined-ca-bundle\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.123370 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-tls-certs\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.128329 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89kzm\" (UniqueName: \"kubernetes.io/projected/b26d33d5-1b96-470b-8677-cb5273c72d25-kube-api-access-89kzm\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.126677 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-secret-key\") pod \"horizon-5cc747446d-fcnwd\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.205086 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-horizon-tls-certs\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " 
pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.205182 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-logs\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.205508 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-config-data\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.205588 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-scripts\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.205652 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zm8g\" (UniqueName: \"kubernetes.io/projected/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-kube-api-access-4zm8g\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.205728 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-horizon-secret-key\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.205885 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-combined-ca-bundle\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.308548 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-horizon-tls-certs\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.308631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-logs\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.308672 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-config-data\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 
17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.308699 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-scripts\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.308730 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zm8g\" (UniqueName: \"kubernetes.io/projected/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-kube-api-access-4zm8g\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.308768 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-horizon-secret-key\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.308827 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-combined-ca-bundle\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.309740 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-logs\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.310127 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-scripts\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.310439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-config-data\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.310775 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.312593 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-horizon-tls-certs\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.316362 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-combined-ca-bundle\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.321749 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-horizon-secret-key\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.351721 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zm8g\" (UniqueName: \"kubernetes.io/projected/4ec3f1d7-119b-40fa-b0f4-3d2f353ee162-kube-api-access-4zm8g\") pod \"horizon-7c85649748-scrqf\" (UID: \"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162\") " pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:14 crc kubenswrapper[4747]: I1202 17:03:14.637846 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:16 crc kubenswrapper[4747]: I1202 17:03:16.044676 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:16 crc kubenswrapper[4747]: I1202 17:03:16.125621 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-7cgwn"] Dec 02 17:03:16 crc kubenswrapper[4747]: I1202 17:03:16.125884 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns" containerID="cri-o://1cf16f0c75df1a85e8936e861c0d671f1208acbbe1bc7af03fd232faea770196" gracePeriod=10 Dec 02 17:03:16 crc kubenswrapper[4747]: I1202 17:03:16.132705 4747 generic.go:334] "Generic (PLEG): container finished" podID="9bafda27-9255-4b5a-921f-69324343b9e0" containerID="b4b794ba5ef57686542a7628886e3a73e92dd9ba7de9988196484760891dfd9c" exitCode=0 Dec 02 17:03:16 crc kubenswrapper[4747]: I1202 17:03:16.132827 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9bafda27-9255-4b5a-921f-69324343b9e0","Type":"ContainerDied","Data":"b4b794ba5ef57686542a7628886e3a73e92dd9ba7de9988196484760891dfd9c"} Dec 02 17:03:16 crc kubenswrapper[4747]: I1202 17:03:16.149426 4747 generic.go:334] "Generic (PLEG): container finished" podID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerID="f803f943eb3ed7f15daf5407d94e6199c451b9d2392a838356dbd3be677b2301" exitCode=0 Dec 02 17:03:16 crc kubenswrapper[4747]: I1202 17:03:16.149478 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec","Type":"ContainerDied","Data":"f803f943eb3ed7f15daf5407d94e6199c451b9d2392a838356dbd3be677b2301"} Dec 02 17:03:17 crc kubenswrapper[4747]: I1202 17:03:17.199393 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: connect: connection refused" Dec 02 17:03:18 crc kubenswrapper[4747]: I1202 17:03:18.177649 4747 generic.go:334] "Generic (PLEG): container finished" podID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerID="1cf16f0c75df1a85e8936e861c0d671f1208acbbe1bc7af03fd232faea770196" exitCode=0 Dec 02 17:03:18 crc kubenswrapper[4747]: I1202 17:03:18.177741 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" event={"ID":"6137dd81-a215-4037-ade0-f3b2ddb4573b","Type":"ContainerDied","Data":"1cf16f0c75df1a85e8936e861c0d671f1208acbbe1bc7af03fd232faea770196"} Dec 02 17:03:19 crc kubenswrapper[4747]: I1202 17:03:19.192251 4747 generic.go:334] "Generic (PLEG): container finished" podID="dc039d89-567b-4837-8b5b-a5903c7a437c" containerID="626edf5c8b730915ff5dfe7a71e0ba211533522640a80e114136d58d0e81b81c" exitCode=0 Dec 02 17:03:19 crc kubenswrapper[4747]: I1202 17:03:19.192342 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q7gd9" event={"ID":"dc039d89-567b-4837-8b5b-a5903c7a437c","Type":"ContainerDied","Data":"626edf5c8b730915ff5dfe7a71e0ba211533522640a80e114136d58d0e81b81c"} Dec 02 17:03:26 crc kubenswrapper[4747]: E1202 17:03:26.594088 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 02 17:03:26 crc kubenswrapper[4747]: E1202 17:03:26.594795 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n55bhb4h699h99h8bh676h5d9hf6h5dfh567h59dh77h5c7hd6h85h5bh85h54h86h5ffh597h667h5d9h544h576h7dh6fh675hfch675h66ch655q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zsxp7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-d5cdc885-fzw6d_openstack(6131d8c6-b2aa-44b9-afe5-617eccd809bc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:03:26 crc kubenswrapper[4747]: E1202 17:03:26.597855 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-d5cdc885-fzw6d" podUID="6131d8c6-b2aa-44b9-afe5-617eccd809bc" Dec 02 17:03:27 crc kubenswrapper[4747]: I1202 17:03:27.199212 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Dec 02 17:03:28 crc kubenswrapper[4747]: E1202 17:03:28.692555 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Dec 02 17:03:28 crc kubenswrapper[4747]: E1202 17:03:28.693694 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kmm5g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-sclpf_openstack(b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:03:28 crc kubenswrapper[4747]: E1202 17:03:28.694853 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-sclpf" podUID="b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" Dec 02 17:03:28 crc kubenswrapper[4747]: E1202 17:03:28.760106 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 02 17:03:28 crc kubenswrapper[4747]: E1202 17:03:28.760296 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8fhc8h5d8h78hbh59ch68h5f4h57ch647h579hbfhf4h684h89h555h5f4h5d5h657h55chfhf6hf7h6h5d8h5d9h67fh9ch69h659h5c7h599q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p7ctv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-cd4679699-v9rpl_openstack(a4e4fe63-a9e0-4903-be72-bc14a8d3113b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:03:28 crc kubenswrapper[4747]: E1202 17:03:28.774188 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-cd4679699-v9rpl" podUID="a4e4fe63-a9e0-4903-be72-bc14a8d3113b" Dec 02 17:03:29 crc kubenswrapper[4747]: E1202 17:03:29.295968 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-sclpf" podUID="b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" Dec 02 17:03:31 crc kubenswrapper[4747]: I1202 17:03:31.795490 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:03:31 crc kubenswrapper[4747]: I1202 17:03:31.796141 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:03:32 crc kubenswrapper[4747]: I1202 17:03:32.200094 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Dec 02 17:03:32 crc kubenswrapper[4747]: I1202 17:03:32.200401 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:03:36 crc kubenswrapper[4747]: I1202 17:03:36.282016 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 17:03:36 crc kubenswrapper[4747]: I1202 17:03:36.282505 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 17:03:36 crc kubenswrapper[4747]: I1202 17:03:36.310507 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:36 crc kubenswrapper[4747]: I1202 17:03:36.310565 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.200531 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Dec 02 17:03:37 crc kubenswrapper[4747]: E1202 17:03:37.565421 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 02 17:03:37 crc kubenswrapper[4747]: E1202 17:03:37.565613 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n66hbdh66ch66dh5fdhc4hbch59bh66h556h556h556h54fhbfh57chd8h586h564hf9h7fh5c5h588h85hdfh599h5f6h97h668h5c9h5dch565h98q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-52df8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-568d9bc79c-hb4wc_openstack(c499186d-ded2-4595-a041-07143f5676cd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:03:37 crc kubenswrapper[4747]: E1202 17:03:37.573258 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-568d9bc79c-hb4wc" podUID="c499186d-ded2-4595-a041-07143f5676cd" Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.686842 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.692333 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-cd4679699-v9rpl" Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795631 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-combined-ca-bundle\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795744 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-config-data\") pod \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795778 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-public-tls-certs\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795834 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7ctv\" (UniqueName: \"kubernetes.io/projected/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-kube-api-access-p7ctv\") pod \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-logs\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795944 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-logs\") pod \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795974 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-479wq\" (UniqueName: \"kubernetes.io/projected/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-kube-api-access-479wq\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.795989 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-scripts\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.796009 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-scripts\") pod \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") " Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.796054 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-config-data\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") " 
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.796086 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") "
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.796148 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-httpd-run\") pod \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\" (UID: \"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec\") "
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.796174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-horizon-secret-key\") pod \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\" (UID: \"a4e4fe63-a9e0-4903-be72-bc14a8d3113b\") "
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.796996 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-logs" (OuterVolumeSpecName: "logs") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.797093 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-scripts" (OuterVolumeSpecName: "scripts") pod "a4e4fe63-a9e0-4903-be72-bc14a8d3113b" (UID: "a4e4fe63-a9e0-4903-be72-bc14a8d3113b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.797125 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-logs" (OuterVolumeSpecName: "logs") pod "a4e4fe63-a9e0-4903-be72-bc14a8d3113b" (UID: "a4e4fe63-a9e0-4903-be72-bc14a8d3113b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.797364 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-config-data" (OuterVolumeSpecName: "config-data") pod "a4e4fe63-a9e0-4903-be72-bc14a8d3113b" (UID: "a4e4fe63-a9e0-4903-be72-bc14a8d3113b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.798681 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.803807 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-kube-api-access-479wq" (OuterVolumeSpecName: "kube-api-access-479wq") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "kube-api-access-479wq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.803816 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.804429 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-scripts" (OuterVolumeSpecName: "scripts") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.804711 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-kube-api-access-p7ctv" (OuterVolumeSpecName: "kube-api-access-p7ctv") pod "a4e4fe63-a9e0-4903-be72-bc14a8d3113b" (UID: "a4e4fe63-a9e0-4903-be72-bc14a8d3113b"). InnerVolumeSpecName "kube-api-access-p7ctv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.804915 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a4e4fe63-a9e0-4903-be72-bc14a8d3113b" (UID: "a4e4fe63-a9e0-4903-be72-bc14a8d3113b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.836349 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.855255 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-config-data" (OuterVolumeSpecName: "config-data") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.861105 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" (UID: "18cb7361-76fe-4a4b-8c60-a82c7e0b16ec"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.898671 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.898893 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.898951 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.898967 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.898977 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.898986 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.898996 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7ctv\" (UniqueName: \"kubernetes.io/projected/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-kube-api-access-p7ctv\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.899008 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-logs\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.899017 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-logs\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.899027 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-479wq\" (UniqueName: \"kubernetes.io/projected/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-kube-api-access-479wq\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.899050 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.899060 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a4e4fe63-a9e0-4903-be72-bc14a8d3113b-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.899070 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:37 crc kubenswrapper[4747]: I1202 17:03:37.918017 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.000358 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.382373 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-cd4679699-v9rpl"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.382364 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-cd4679699-v9rpl" event={"ID":"a4e4fe63-a9e0-4903-be72-bc14a8d3113b","Type":"ContainerDied","Data":"0f3680ff0b5d7fbf9abe63d148ab18302f50b44f1bad4b23b0efe04e26933061"}
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.392077 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.400118 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18cb7361-76fe-4a4b-8c60-a82c7e0b16ec","Type":"ContainerDied","Data":"867c5028c1eccdea176ea31bbc9144e96de7f7bf974856a6451651fcdc4d2444"}
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.400206 4747 scope.go:117] "RemoveContainer" containerID="f803f943eb3ed7f15daf5407d94e6199c451b9d2392a838356dbd3be677b2301"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.419708 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.419943 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5bfs7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-6f4g4_openstack(b3d0edb2-cca7-4f61-bf72-c3fddb909fea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.422399 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-6f4g4" podUID="b3d0edb2-cca7-4f61-bf72-c3fddb909fea"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.545426 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5cdc885-fzw6d"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.611967 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-cd4679699-v9rpl"]
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.627607 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.632710 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-cd4679699-v9rpl"]
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.641999 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q7gd9"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.669969 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.678413 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.683454 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695139 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.695571 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-httpd"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695585 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-httpd"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.695593 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-log"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695600 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-log"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.695621 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="init"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695627 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="init"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.695643 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695649 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.695662 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-log"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695667 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-log"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.695688 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc039d89-567b-4837-8b5b-a5903c7a437c" containerName="keystone-bootstrap"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695694 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc039d89-567b-4837-8b5b-a5903c7a437c" containerName="keystone-bootstrap"
Dec 02 17:03:38 crc kubenswrapper[4747]: E1202 17:03:38.695704 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-httpd"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695709 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-httpd"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695870 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-httpd"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695886 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695896 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-httpd"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695926 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" containerName="glance-log"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695934 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" containerName="glance-log"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.695945 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc039d89-567b-4837-8b5b-a5903c7a437c" containerName="keystone-bootstrap"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.698444 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.701658 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.702003 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.710388 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.721968 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wggzt\" (UniqueName: \"kubernetes.io/projected/6137dd81-a215-4037-ade0-f3b2ddb4573b-kube-api-access-wggzt\") pod \"6137dd81-a215-4037-ade0-f3b2ddb4573b\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722122 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-fernet-keys\") pod \"dc039d89-567b-4837-8b5b-a5903c7a437c\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722143 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-config-data\") pod \"dc039d89-567b-4837-8b5b-a5903c7a437c\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722179 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-swift-storage-0\") pod \"6137dd81-a215-4037-ade0-f3b2ddb4573b\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722221 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-svc\") pod \"6137dd81-a215-4037-ade0-f3b2ddb4573b\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722238 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-sb\") pod \"6137dd81-a215-4037-ade0-f3b2ddb4573b\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722277 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6131d8c6-b2aa-44b9-afe5-617eccd809bc-horizon-secret-key\") pod \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722336 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-combined-ca-bundle\") pod \"dc039d89-567b-4837-8b5b-a5903c7a437c\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722369 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-scripts\") pod \"dc039d89-567b-4837-8b5b-a5903c7a437c\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722388 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-credential-keys\") pod \"dc039d89-567b-4837-8b5b-a5903c7a437c\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722411 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tfkl\" (UniqueName: \"kubernetes.io/projected/dc039d89-567b-4837-8b5b-a5903c7a437c-kube-api-access-8tfkl\") pod \"dc039d89-567b-4837-8b5b-a5903c7a437c\" (UID: \"dc039d89-567b-4837-8b5b-a5903c7a437c\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722434 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-config-data\") pod \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722462 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-scripts\") pod \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722478 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6131d8c6-b2aa-44b9-afe5-617eccd809bc-logs\") pod \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722505 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-nb\") pod \"6137dd81-a215-4037-ade0-f3b2ddb4573b\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722525 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-config\") pod \"6137dd81-a215-4037-ade0-f3b2ddb4573b\" (UID: \"6137dd81-a215-4037-ade0-f3b2ddb4573b\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.722542 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsxp7\" (UniqueName: \"kubernetes.io/projected/6131d8c6-b2aa-44b9-afe5-617eccd809bc-kube-api-access-zsxp7\") pod \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\" (UID: \"6131d8c6-b2aa-44b9-afe5-617eccd809bc\") "
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.727255 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-config-data" (OuterVolumeSpecName: "config-data") pod "6131d8c6-b2aa-44b9-afe5-617eccd809bc" (UID: "6131d8c6-b2aa-44b9-afe5-617eccd809bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.733026 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-scripts" (OuterVolumeSpecName: "scripts") pod "6131d8c6-b2aa-44b9-afe5-617eccd809bc" (UID: "6131d8c6-b2aa-44b9-afe5-617eccd809bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.734801 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6131d8c6-b2aa-44b9-afe5-617eccd809bc-logs" (OuterVolumeSpecName: "logs") pod "6131d8c6-b2aa-44b9-afe5-617eccd809bc" (UID: "6131d8c6-b2aa-44b9-afe5-617eccd809bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.748921 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6131d8c6-b2aa-44b9-afe5-617eccd809bc-kube-api-access-zsxp7" (OuterVolumeSpecName: "kube-api-access-zsxp7") pod "6131d8c6-b2aa-44b9-afe5-617eccd809bc" (UID: "6131d8c6-b2aa-44b9-afe5-617eccd809bc"). InnerVolumeSpecName "kube-api-access-zsxp7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.766227 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-scripts" (OuterVolumeSpecName: "scripts") pod "dc039d89-567b-4837-8b5b-a5903c7a437c" (UID: "dc039d89-567b-4837-8b5b-a5903c7a437c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.772247 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6131d8c6-b2aa-44b9-afe5-617eccd809bc-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "6131d8c6-b2aa-44b9-afe5-617eccd809bc" (UID: "6131d8c6-b2aa-44b9-afe5-617eccd809bc"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.772762 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc039d89-567b-4837-8b5b-a5903c7a437c-kube-api-access-8tfkl" (OuterVolumeSpecName: "kube-api-access-8tfkl") pod "dc039d89-567b-4837-8b5b-a5903c7a437c" (UID: "dc039d89-567b-4837-8b5b-a5903c7a437c"). InnerVolumeSpecName "kube-api-access-8tfkl".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.775970 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6137dd81-a215-4037-ade0-f3b2ddb4573b-kube-api-access-wggzt" (OuterVolumeSpecName: "kube-api-access-wggzt") pod "6137dd81-a215-4037-ade0-f3b2ddb4573b" (UID: "6137dd81-a215-4037-ade0-f3b2ddb4573b"). InnerVolumeSpecName "kube-api-access-wggzt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.784127 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dc039d89-567b-4837-8b5b-a5903c7a437c" (UID: "dc039d89-567b-4837-8b5b-a5903c7a437c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.784101 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dc039d89-567b-4837-8b5b-a5903c7a437c" (UID: "dc039d89-567b-4837-8b5b-a5903c7a437c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.797451 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc039d89-567b-4837-8b5b-a5903c7a437c" (UID: "dc039d89-567b-4837-8b5b-a5903c7a437c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.823800 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-scripts\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.824036 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-combined-ca-bundle\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.824135 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-logs\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.824191 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8vct\" (UniqueName: \"kubernetes.io/projected/9bafda27-9255-4b5a-921f-69324343b9e0-kube-api-access-q8vct\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.824360 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.824448 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-httpd-run\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.824598 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-config-data\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.824717 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-internal-tls-certs\") pod \"9bafda27-9255-4b5a-921f-69324343b9e0\" (UID: \"9bafda27-9255-4b5a-921f-69324343b9e0\") " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825138 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6m74\" (UniqueName: \"kubernetes.io/projected/ce4f4582-9147-4f5a-bf73-7d86cf4298da-kube-api-access-f6m74\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825204 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825368 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-scripts\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825392 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825502 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825538 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825557 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825604 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-logs\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825748 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.825762 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826477 4747 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826495 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tfkl\" (UniqueName: \"kubernetes.io/projected/dc039d89-567b-4837-8b5b-a5903c7a437c-kube-api-access-8tfkl\") on node \"crc\" DevicePath 
\"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826511 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826536 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6131d8c6-b2aa-44b9-afe5-617eccd809bc-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826547 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6131d8c6-b2aa-44b9-afe5-617eccd809bc-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826560 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsxp7\" (UniqueName: \"kubernetes.io/projected/6131d8c6-b2aa-44b9-afe5-617eccd809bc-kube-api-access-zsxp7\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826571 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wggzt\" (UniqueName: \"kubernetes.io/projected/6137dd81-a215-4037-ade0-f3b2ddb4573b-kube-api-access-wggzt\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826582 4747 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.826593 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6131d8c6-b2aa-44b9-afe5-617eccd809bc-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.827625 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.828785 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-logs" (OuterVolumeSpecName: "logs") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.829422 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-scripts" (OuterVolumeSpecName: "scripts") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.831176 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bafda27-9255-4b5a-921f-69324343b9e0-kube-api-access-q8vct" (OuterVolumeSpecName: "kube-api-access-q8vct") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "kube-api-access-q8vct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.848127 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.865843 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-config-data" (OuterVolumeSpecName: "config-data") pod "dc039d89-567b-4837-8b5b-a5903c7a437c" (UID: "dc039d89-567b-4837-8b5b-a5903c7a437c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.878005 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6137dd81-a215-4037-ade0-f3b2ddb4573b" (UID: "6137dd81-a215-4037-ade0-f3b2ddb4573b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.880220 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6137dd81-a215-4037-ade0-f3b2ddb4573b" (UID: "6137dd81-a215-4037-ade0-f3b2ddb4573b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.888747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6137dd81-a215-4037-ade0-f3b2ddb4573b" (UID: "6137dd81-a215-4037-ade0-f3b2ddb4573b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.892357 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.899475 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6137dd81-a215-4037-ade0-f3b2ddb4573b" (UID: "6137dd81-a215-4037-ade0-f3b2ddb4573b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.900508 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-config" (OuterVolumeSpecName: "config") pod "6137dd81-a215-4037-ade0-f3b2ddb4573b" (UID: "6137dd81-a215-4037-ade0-f3b2ddb4573b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.913798 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-config-data" (OuterVolumeSpecName: "config-data") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.919833 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9bafda27-9255-4b5a-921f-69324343b9e0" (UID: "9bafda27-9255-4b5a-921f-69324343b9e0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.928895 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-scripts\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.928971 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929070 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929117 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929140 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929162 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-logs\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6m74\" (UniqueName: \"kubernetes.io/projected/ce4f4582-9147-4f5a-bf73-7d86cf4298da-kube-api-access-f6m74\") pod \"glance-default-external-api-0\" (UID: 
\"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929247 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-config-data\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929361 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929378 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929390 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929401 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929412 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929424 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929434 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8vct\" (UniqueName: \"kubernetes.io/projected/9bafda27-9255-4b5a-921f-69324343b9e0-kube-api-access-q8vct\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929448 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc039d89-567b-4837-8b5b-a5903c7a437c-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929475 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929488 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929500 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9bafda27-9255-4b5a-921f-69324343b9e0-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929511 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929522 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6137dd81-a215-4037-ade0-f3b2ddb4573b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.929533 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bafda27-9255-4b5a-921f-69324343b9e0-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.931946 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.932059 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.932515 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-logs\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.936495 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-config-data\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.937596 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-scripts\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.943725 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.944218 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.949012 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6m74\" (UniqueName: \"kubernetes.io/projected/ce4f4582-9147-4f5a-bf73-7d86cf4298da-kube-api-access-f6m74\") pod 
\"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.957793 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 02 17:03:38 crc kubenswrapper[4747]: I1202 17:03:38.964998 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " pod="openstack/glance-default-external-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.035400 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.248498 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.404296 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" event={"ID":"6137dd81-a215-4037-ade0-f3b2ddb4573b","Type":"ContainerDied","Data":"b953c288c475fa6453218f9e24df16c16a4344bfa430c1ec5fdee7665e56e9a2"} Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.404317 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.405702 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5cdc885-fzw6d" event={"ID":"6131d8c6-b2aa-44b9-afe5-617eccd809bc","Type":"ContainerDied","Data":"92f3dfbbc473420b77cb9087c55544e39707476a8eac37bc02bd11f65d195ade"} Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.405722 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5cdc885-fzw6d" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.408389 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9bafda27-9255-4b5a-921f-69324343b9e0","Type":"ContainerDied","Data":"81739c64482ae1ad0c6d8cd5871f7628f9fdc04588119f354167138a10dc97b2"} Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.408692 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.409784 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q7gd9" event={"ID":"dc039d89-567b-4837-8b5b-a5903c7a437c","Type":"ContainerDied","Data":"10fa3f0412fe3292f13bed2158b477f96317edec554ad0ab83d37d8db952efb1"} Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.409812 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10fa3f0412fe3292f13bed2158b477f96317edec554ad0ab83d37d8db952efb1" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.409844 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q7gd9" Dec 02 17:03:39 crc kubenswrapper[4747]: E1202 17:03:39.412612 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-6f4g4" podUID="b3d0edb2-cca7-4f61-bf72-c3fddb909fea" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.467967 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-7cgwn"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.475607 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-7cgwn"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.520827 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d5cdc885-fzw6d"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.533551 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-d5cdc885-fzw6d"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.548976 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.558514 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.575357 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.577249 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.580482 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.580826 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.602883 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.647437 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-config-data\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.648659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-logs\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.649040 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-scripts\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.649084 
4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.649115 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rxpt\" (UniqueName: \"kubernetes.io/projected/649da975-bf0f-4472-83e6-abcf48197293-kube-api-access-6rxpt\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.649268 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.649320 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.649671 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.764344 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rxpt\" (UniqueName: \"kubernetes.io/projected/649da975-bf0f-4472-83e6-abcf48197293-kube-api-access-6rxpt\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.764816 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.764881 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.765031 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 
17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.765957 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-config-data\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.767284 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-logs\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.767424 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.770881 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-logs\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.767436 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-scripts\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.771131 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.771970 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.773095 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-scripts\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.780362 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-config-data\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.781092 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.785849 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rxpt\" (UniqueName: \"kubernetes.io/projected/649da975-bf0f-4472-83e6-abcf48197293-kube-api-access-6rxpt\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.802234 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.806183 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.813632 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18cb7361-76fe-4a4b-8c60-a82c7e0b16ec" path="/var/lib/kubelet/pods/18cb7361-76fe-4a4b-8c60-a82c7e0b16ec/volumes" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.815030 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6131d8c6-b2aa-44b9-afe5-617eccd809bc" path="/var/lib/kubelet/pods/6131d8c6-b2aa-44b9-afe5-617eccd809bc/volumes" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.815589 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" path="/var/lib/kubelet/pods/6137dd81-a215-4037-ade0-f3b2ddb4573b/volumes" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.816733 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bafda27-9255-4b5a-921f-69324343b9e0" path="/var/lib/kubelet/pods/9bafda27-9255-4b5a-921f-69324343b9e0/volumes" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.818261 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4e4fe63-a9e0-4903-be72-bc14a8d3113b" path="/var/lib/kubelet/pods/a4e4fe63-a9e0-4903-be72-bc14a8d3113b/volumes" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.819732 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-q7gd9"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.834565 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-q7gd9"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.890843 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-9gbx8"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.892890 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.902106 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zr9j" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.902254 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.902498 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.902529 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.907558 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9gbx8"] Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.949167 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.977723 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r8n8\" (UniqueName: \"kubernetes.io/projected/61d3d063-44d5-43ee-9b9e-a4560c33f775-kube-api-access-6r8n8\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.977785 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-credential-keys\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.977833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-config-data\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.977860 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-scripts\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.977934 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-combined-ca-bundle\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:39 crc kubenswrapper[4747]: I1202 17:03:39.977964 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-fernet-keys\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.079124 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-combined-ca-bundle\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.079182 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-fernet-keys\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.079292 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r8n8\" (UniqueName: \"kubernetes.io/projected/61d3d063-44d5-43ee-9b9e-a4560c33f775-kube-api-access-6r8n8\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.079316 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-credential-keys\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.079377 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-config-data\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.079403 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-scripts\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.083623 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-combined-ca-bundle\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.084185 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-config-data\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.085533 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-fernet-keys\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.086656 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-credential-keys\") pod 
\"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.088534 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-scripts\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.108285 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r8n8\" (UniqueName: \"kubernetes.io/projected/61d3d063-44d5-43ee-9b9e-a4560c33f775-kube-api-access-6r8n8\") pod \"keystone-bootstrap-9gbx8\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.222799 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.420055 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-568d9bc79c-hb4wc" event={"ID":"c499186d-ded2-4595-a041-07143f5676cd","Type":"ContainerDied","Data":"426877c942349c1a9f3260de3128e3b7197f3c44e548f766bc8ec0401117752b"} Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.420346 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="426877c942349c1a9f3260de3128e3b7197f3c44e548f766bc8ec0401117752b" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.420156 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:40 crc kubenswrapper[4747]: E1202 17:03:40.441410 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 02 17:03:40 crc kubenswrapper[4747]: E1202 17:03:40.441597 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6d62m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-bgbtw_openstack(3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:03:40 crc kubenswrapper[4747]: E1202 17:03:40.442703 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-bgbtw" podUID="3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.454097 4747 scope.go:117] "RemoveContainer" containerID="4eb991c0cd52d31fbe2cf1164339d5c04498127b5366f83075522e9d2d3120ad" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.485017 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c499186d-ded2-4595-a041-07143f5676cd-logs\") pod \"c499186d-ded2-4595-a041-07143f5676cd\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.485127 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-config-data\") pod \"c499186d-ded2-4595-a041-07143f5676cd\" 
(UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.485237 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52df8\" (UniqueName: \"kubernetes.io/projected/c499186d-ded2-4595-a041-07143f5676cd-kube-api-access-52df8\") pod \"c499186d-ded2-4595-a041-07143f5676cd\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.485314 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-scripts\") pod \"c499186d-ded2-4595-a041-07143f5676cd\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.485567 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c499186d-ded2-4595-a041-07143f5676cd-horizon-secret-key\") pod \"c499186d-ded2-4595-a041-07143f5676cd\" (UID: \"c499186d-ded2-4595-a041-07143f5676cd\") " Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.486151 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-config-data" (OuterVolumeSpecName: "config-data") pod "c499186d-ded2-4595-a041-07143f5676cd" (UID: "c499186d-ded2-4595-a041-07143f5676cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.486732 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c499186d-ded2-4595-a041-07143f5676cd-logs" (OuterVolumeSpecName: "logs") pod "c499186d-ded2-4595-a041-07143f5676cd" (UID: "c499186d-ded2-4595-a041-07143f5676cd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.487188 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-scripts" (OuterVolumeSpecName: "scripts") pod "c499186d-ded2-4595-a041-07143f5676cd" (UID: "c499186d-ded2-4595-a041-07143f5676cd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.494096 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c499186d-ded2-4595-a041-07143f5676cd-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c499186d-ded2-4595-a041-07143f5676cd" (UID: "c499186d-ded2-4595-a041-07143f5676cd"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.496068 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c499186d-ded2-4595-a041-07143f5676cd-kube-api-access-52df8" (OuterVolumeSpecName: "kube-api-access-52df8") pod "c499186d-ded2-4595-a041-07143f5676cd" (UID: "c499186d-ded2-4595-a041-07143f5676cd"). InnerVolumeSpecName "kube-api-access-52df8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.590218 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c499186d-ded2-4595-a041-07143f5676cd-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.590257 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.590268 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52df8\" (UniqueName: \"kubernetes.io/projected/c499186d-ded2-4595-a041-07143f5676cd-kube-api-access-52df8\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.590279 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c499186d-ded2-4595-a041-07143f5676cd-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.590291 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c499186d-ded2-4595-a041-07143f5676cd-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.597980 4747 scope.go:117] "RemoveContainer" containerID="1cf16f0c75df1a85e8936e861c0d671f1208acbbe1bc7af03fd232faea770196" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.651736 4747 scope.go:117] "RemoveContainer" containerID="b535c9317f227705f8479c998e9627ee826c457ee4786e28f6dcf0694deaf271" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.788275 4747 scope.go:117] "RemoveContainer" containerID="b4b794ba5ef57686542a7628886e3a73e92dd9ba7de9988196484760891dfd9c" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.856312 4747 scope.go:117] "RemoveContainer" containerID="9331fccf82f9f276b315a0c5e5a474f3c668cd82d1e40abf64db5664f4439c0e" Dec 02 17:03:40 crc kubenswrapper[4747]: I1202 17:03:40.864504 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c85649748-scrqf"] Dec 02 17:03:40 crc kubenswrapper[4747]: W1202 17:03:40.883580 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ec3f1d7_119b_40fa_b0f4_3d2f353ee162.slice/crio-3c637e5d79891f7a9785f1b3bc1715f5e9702b995ddc2b1a6607b2f1a655d008 WatchSource:0}: Error finding container 3c637e5d79891f7a9785f1b3bc1715f5e9702b995ddc2b1a6607b2f1a655d008: Status 404 returned error can't find the container with id 3c637e5d79891f7a9785f1b3bc1715f5e9702b995ddc2b1a6607b2f1a655d008 Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.069157 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-9gbx8"] Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.073461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cc747446d-fcnwd"] Dec 02 17:03:41 crc kubenswrapper[4747]: W1202 17:03:41.087498 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb26d33d5_1b96_470b_8677_cb5273c72d25.slice/crio-d6c0d91e58b272df206e7ead7812af0495044d620559b29a1a0575cad8422a93 WatchSource:0}: Error finding container d6c0d91e58b272df206e7ead7812af0495044d620559b29a1a0575cad8422a93: Status 404 returned error 
can't find the container with id d6c0d91e58b272df206e7ead7812af0495044d620559b29a1a0575cad8422a93 Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.303663 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:03:41 crc kubenswrapper[4747]: W1202 17:03:41.303804 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod649da975_bf0f_4472_83e6_abcf48197293.slice/crio-82f3a1173bc0226ea7adae9f763f04e9efefbd32f0af5adaf1ed3c0ec32095c2 WatchSource:0}: Error finding container 82f3a1173bc0226ea7adae9f763f04e9efefbd32f0af5adaf1ed3c0ec32095c2: Status 404 returned error can't find the container with id 82f3a1173bc0226ea7adae9f763f04e9efefbd32f0af5adaf1ed3c0ec32095c2 Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.437333 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c85649748-scrqf" event={"ID":"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162","Type":"ContainerStarted","Data":"3c637e5d79891f7a9785f1b3bc1715f5e9702b995ddc2b1a6607b2f1a655d008"} Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.440583 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc747446d-fcnwd" event={"ID":"b26d33d5-1b96-470b-8677-cb5273c72d25","Type":"ContainerStarted","Data":"d6c0d91e58b272df206e7ead7812af0495044d620559b29a1a0575cad8422a93"} Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.442323 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"649da975-bf0f-4472-83e6-abcf48197293","Type":"ContainerStarted","Data":"82f3a1173bc0226ea7adae9f763f04e9efefbd32f0af5adaf1ed3c0ec32095c2"} Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.449726 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerStarted","Data":"d7b83f13de7d9765e6fa532e061fb77c43f20f6c68ce8ba5959efba12a03f098"} Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.462063 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sclpf" event={"ID":"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8","Type":"ContainerStarted","Data":"fc2bc18dd44a49460d0c436c80a85d551e46008ce9a3b3e10dd6430713051a0b"} Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.468551 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9gbx8" event={"ID":"61d3d063-44d5-43ee-9b9e-a4560c33f775","Type":"ContainerStarted","Data":"3e8ff609f25fdea982bb3091ca88896f7e8d46afd4c12cc4421b3ab74c34c3cc"} Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.468604 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9gbx8" event={"ID":"61d3d063-44d5-43ee-9b9e-a4560c33f775","Type":"ContainerStarted","Data":"90fc76262561a49acb439dcf758fe6322641228da3a771e6fbbc0b34ccde5160"} Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.472887 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-568d9bc79c-hb4wc" Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.498985 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-sclpf" podStartSLOduration=3.309208018 podStartE2EDuration="36.498957591s" podCreationTimestamp="2025-12-02 17:03:05 +0000 UTC" firstStartedPulling="2025-12-02 17:03:07.462891958 +0000 UTC m=+1217.989780707" lastFinishedPulling="2025-12-02 17:03:40.652641531 +0000 UTC m=+1251.179530280" observedRunningTime="2025-12-02 17:03:41.482159213 +0000 UTC m=+1252.009047962" watchObservedRunningTime="2025-12-02 17:03:41.498957591 +0000 UTC m=+1252.025846340" Dec 02 17:03:41 crc kubenswrapper[4747]: E1202 17:03:41.522797 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-bgbtw" podUID="3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.537487 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-9gbx8" podStartSLOduration=2.537458866 podStartE2EDuration="2.537458866s" podCreationTimestamp="2025-12-02 17:03:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:41.511712804 +0000 UTC m=+1252.038601563" watchObservedRunningTime="2025-12-02 17:03:41.537458866 +0000 UTC m=+1252.064347615" Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.630037 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-568d9bc79c-hb4wc"] Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.635647 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-568d9bc79c-hb4wc"] Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.775892 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c499186d-ded2-4595-a041-07143f5676cd" path="/var/lib/kubelet/pods/c499186d-ded2-4595-a041-07143f5676cd/volumes" Dec 02 17:03:41 crc kubenswrapper[4747]: I1202 17:03:41.776881 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc039d89-567b-4837-8b5b-a5903c7a437c" path="/var/lib/kubelet/pods/dc039d89-567b-4837-8b5b-a5903c7a437c/volumes" Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.121792 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:03:42 crc kubenswrapper[4747]: W1202 17:03:42.137497 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce4f4582_9147_4f5a_bf73_7d86cf4298da.slice/crio-eb3f7acad6609269b84b8637332c8e4acf3753f4a27fa3178e1d454caf4e4081 WatchSource:0}: Error finding container eb3f7acad6609269b84b8637332c8e4acf3753f4a27fa3178e1d454caf4e4081: Status 404 returned error can't find the container with id eb3f7acad6609269b84b8637332c8e4acf3753f4a27fa3178e1d454caf4e4081 Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.202175 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5475cc9-7cgwn" podUID="6137dd81-a215-4037-ade0-f3b2ddb4573b" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.483128 4747 
generic.go:334] "Generic (PLEG): container finished" podID="d05e3f55-d7e5-417a-9344-e24038aaa516" containerID="9c22ee406fa8516908ec60f0cc4a3a8dacc8dcb202d0f8112b59ee1aa68bf9e0" exitCode=0 Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.483192 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-q5btv" event={"ID":"d05e3f55-d7e5-417a-9344-e24038aaa516","Type":"ContainerDied","Data":"9c22ee406fa8516908ec60f0cc4a3a8dacc8dcb202d0f8112b59ee1aa68bf9e0"} Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.494665 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c85649748-scrqf" event={"ID":"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162","Type":"ContainerStarted","Data":"0dcbf940c436b0c06f055f8640ab5ac7248142f2b0a57b87ebe3fbe3a9dca7c8"} Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.494719 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c85649748-scrqf" event={"ID":"4ec3f1d7-119b-40fa-b0f4-3d2f353ee162","Type":"ContainerStarted","Data":"7f2face32d22452f4a05cab822070c3c1e11b195757391ba8235125713196fa4"} Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.504535 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ce4f4582-9147-4f5a-bf73-7d86cf4298da","Type":"ContainerStarted","Data":"eb3f7acad6609269b84b8637332c8e4acf3753f4a27fa3178e1d454caf4e4081"} Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.517207 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc747446d-fcnwd" event={"ID":"b26d33d5-1b96-470b-8677-cb5273c72d25","Type":"ContainerStarted","Data":"92cc25f582af9f955108070de723a136742482bbeaa439305c18cea41ec4b329"} Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.517287 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc747446d-fcnwd" event={"ID":"b26d33d5-1b96-470b-8677-cb5273c72d25","Type":"ContainerStarted","Data":"a23dd2f8d5c9c22dba3575c453fe1e808eefeb21f0eec73a7710b1121e1f288b"} Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.523038 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"649da975-bf0f-4472-83e6-abcf48197293","Type":"ContainerStarted","Data":"1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c"} Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.552051 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7c85649748-scrqf" podStartSLOduration=28.916786298 podStartE2EDuration="29.552018582s" podCreationTimestamp="2025-12-02 17:03:13 +0000 UTC" firstStartedPulling="2025-12-02 17:03:40.892022682 +0000 UTC m=+1251.418911431" lastFinishedPulling="2025-12-02 17:03:41.527254966 +0000 UTC m=+1252.054143715" observedRunningTime="2025-12-02 17:03:42.546791163 +0000 UTC m=+1253.073679912" watchObservedRunningTime="2025-12-02 17:03:42.552018582 +0000 UTC m=+1253.078907331" Dec 02 17:03:42 crc kubenswrapper[4747]: I1202 17:03:42.591625 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5cc747446d-fcnwd" podStartSLOduration=28.89474628 podStartE2EDuration="29.591603098s" podCreationTimestamp="2025-12-02 17:03:13 +0000 UTC" firstStartedPulling="2025-12-02 17:03:41.098693482 +0000 UTC m=+1251.625582241" lastFinishedPulling="2025-12-02 17:03:41.7955503 +0000 UTC m=+1252.322439059" observedRunningTime="2025-12-02 17:03:42.587770769 +0000 UTC m=+1253.114659518" 
watchObservedRunningTime="2025-12-02 17:03:42.591603098 +0000 UTC m=+1253.118491847" Dec 02 17:03:43 crc kubenswrapper[4747]: I1202 17:03:43.538401 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"649da975-bf0f-4472-83e6-abcf48197293","Type":"ContainerStarted","Data":"4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940"} Dec 02 17:03:43 crc kubenswrapper[4747]: I1202 17:03:43.541059 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerStarted","Data":"4b3de528fe544733b59a27fd0ede0471796837f58ac4df3491c6fe0b7a5190eb"} Dec 02 17:03:43 crc kubenswrapper[4747]: I1202 17:03:43.542591 4747 generic.go:334] "Generic (PLEG): container finished" podID="b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" containerID="fc2bc18dd44a49460d0c436c80a85d551e46008ce9a3b3e10dd6430713051a0b" exitCode=0 Dec 02 17:03:43 crc kubenswrapper[4747]: I1202 17:03:43.542630 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sclpf" event={"ID":"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8","Type":"ContainerDied","Data":"fc2bc18dd44a49460d0c436c80a85d551e46008ce9a3b3e10dd6430713051a0b"} Dec 02 17:03:43 crc kubenswrapper[4747]: I1202 17:03:43.545043 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ce4f4582-9147-4f5a-bf73-7d86cf4298da","Type":"ContainerStarted","Data":"94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528"} Dec 02 17:03:43 crc kubenswrapper[4747]: I1202 17:03:43.578242 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.578200399 podStartE2EDuration="4.578200399s" podCreationTimestamp="2025-12-02 17:03:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:43.568953916 +0000 UTC m=+1254.095842665" watchObservedRunningTime="2025-12-02 17:03:43.578200399 +0000 UTC m=+1254.105089158" Dec 02 17:03:43 crc kubenswrapper[4747]: I1202 17:03:43.985482 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.077856 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-combined-ca-bundle\") pod \"d05e3f55-d7e5-417a-9344-e24038aaa516\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.077969 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-config\") pod \"d05e3f55-d7e5-417a-9344-e24038aaa516\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.078074 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dps92\" (UniqueName: \"kubernetes.io/projected/d05e3f55-d7e5-417a-9344-e24038aaa516-kube-api-access-dps92\") pod \"d05e3f55-d7e5-417a-9344-e24038aaa516\" (UID: \"d05e3f55-d7e5-417a-9344-e24038aaa516\") " Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.099762 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d05e3f55-d7e5-417a-9344-e24038aaa516-kube-api-access-dps92" (OuterVolumeSpecName: "kube-api-access-dps92") pod "d05e3f55-d7e5-417a-9344-e24038aaa516" (UID: "d05e3f55-d7e5-417a-9344-e24038aaa516"). InnerVolumeSpecName "kube-api-access-dps92". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.126518 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-config" (OuterVolumeSpecName: "config") pod "d05e3f55-d7e5-417a-9344-e24038aaa516" (UID: "d05e3f55-d7e5-417a-9344-e24038aaa516"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.151035 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d05e3f55-d7e5-417a-9344-e24038aaa516" (UID: "d05e3f55-d7e5-417a-9344-e24038aaa516"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.180334 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.180376 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d05e3f55-d7e5-417a-9344-e24038aaa516-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.180387 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dps92\" (UniqueName: \"kubernetes.io/projected/d05e3f55-d7e5-417a-9344-e24038aaa516-kube-api-access-dps92\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.316187 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.316265 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.558045 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-q5btv" event={"ID":"d05e3f55-d7e5-417a-9344-e24038aaa516","Type":"ContainerDied","Data":"b692fed4be970e4fcd8728153820626292e488411311da4f761e8a918c5931dc"} Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.558109 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b692fed4be970e4fcd8728153820626292e488411311da4f761e8a918c5931dc" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.558201 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-q5btv" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.574990 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ce4f4582-9147-4f5a-bf73-7d86cf4298da","Type":"ContainerStarted","Data":"05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9"} Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.617234 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.617209372 podStartE2EDuration="6.617209372s" podCreationTimestamp="2025-12-02 17:03:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:44.598757187 +0000 UTC m=+1255.125645936" watchObservedRunningTime="2025-12-02 17:03:44.617209372 +0000 UTC m=+1255.144098121" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.639774 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.640414 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.786035 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-h9fhb"] Dec 02 17:03:44 crc kubenswrapper[4747]: E1202 17:03:44.786500 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d05e3f55-d7e5-417a-9344-e24038aaa516" containerName="neutron-db-sync" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.786517 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d05e3f55-d7e5-417a-9344-e24038aaa516" containerName="neutron-db-sync" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.791763 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d05e3f55-d7e5-417a-9344-e24038aaa516" containerName="neutron-db-sync" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.793295 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.805118 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-h9fhb"] Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.868077 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7f469b9bd4-z45tg"] Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.883611 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f469b9bd4-z45tg"] Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.883807 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.888173 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.888538 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.888720 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jfm9d" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.889707 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.897445 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.897512 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh7xd\" (UniqueName: \"kubernetes.io/projected/7e213849-808c-4275-971d-daf77f6ea6f3-kube-api-access-fh7xd\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.897536 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.897565 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.897618 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-config\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:44 crc kubenswrapper[4747]: I1202 17:03:44.897666 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.000100 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " 
pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.000160 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-ovndb-tls-certs\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.000189 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-config\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.000213 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-httpd-config\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.000230 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s74jd\" (UniqueName: \"kubernetes.io/projected/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-kube-api-access-s74jd\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.000495 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-combined-ca-bundle\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.000698 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.001647 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.002201 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.002346 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh7xd\" (UniqueName: \"kubernetes.io/projected/7e213849-808c-4275-971d-daf77f6ea6f3-kube-api-access-fh7xd\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " 
pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.002372 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.003763 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.003858 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.004046 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-config\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.004732 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-config\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.005347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.020919 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.023592 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh7xd\" (UniqueName: \"kubernetes.io/projected/7e213849-808c-4275-971d-daf77f6ea6f3-kube-api-access-fh7xd\") pod \"dnsmasq-dns-84b966f6c9-h9fhb\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.105532 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-combined-ca-bundle\") pod \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.105923 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-scripts\") pod \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.105976 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-logs\") pod \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.106054 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmm5g\" (UniqueName: \"kubernetes.io/projected/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-kube-api-access-kmm5g\") pod \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.106104 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-config-data\") pod \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\" (UID: \"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8\") " Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.106435 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-ovndb-tls-certs\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.106467 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-config\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.106488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-httpd-config\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.106505 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s74jd\" (UniqueName: 
\"kubernetes.io/projected/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-kube-api-access-s74jd\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.106547 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-combined-ca-bundle\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.107810 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-logs" (OuterVolumeSpecName: "logs") pod "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" (UID: "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.116165 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-httpd-config\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.117304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-ovndb-tls-certs\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.118331 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-config\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.126517 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-kube-api-access-kmm5g" (OuterVolumeSpecName: "kube-api-access-kmm5g") pod "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" (UID: "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8"). InnerVolumeSpecName "kube-api-access-kmm5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.128482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-combined-ca-bundle\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.134066 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-scripts" (OuterVolumeSpecName: "scripts") pod "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" (UID: "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.134679 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s74jd\" (UniqueName: \"kubernetes.io/projected/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-kube-api-access-s74jd\") pod \"neutron-7f469b9bd4-z45tg\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.147816 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.169850 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-config-data" (OuterVolumeSpecName: "config-data") pod "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" (UID: "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.186044 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" (UID: "b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.208564 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.208620 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.208633 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.208644 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmm5g\" (UniqueName: \"kubernetes.io/projected/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-kube-api-access-kmm5g\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.208659 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.216420 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.592978 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-sclpf" event={"ID":"b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8","Type":"ContainerDied","Data":"32835bfc0686335264d699db43f5f62e44e73b1ccc30415561ff23aa8f9f4551"} Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.593306 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32835bfc0686335264d699db43f5f62e44e73b1ccc30415561ff23aa8f9f4551" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.593391 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-sclpf" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.698971 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6b8f479dd4-zx826"] Dec 02 17:03:45 crc kubenswrapper[4747]: E1202 17:03:45.699462 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" containerName="placement-db-sync" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.699487 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" containerName="placement-db-sync" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.699723 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" containerName="placement-db-sync" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.707461 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.713567 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.713889 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.714033 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p5l74" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.714207 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.714788 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.716556 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6b8f479dd4-zx826"] Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.829081 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-h9fhb"] Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.830578 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-scripts\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.830670 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-config-data\") 
pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.830734 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-internal-tls-certs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.830759 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjv86\" (UniqueName: \"kubernetes.io/projected/2efad855-47a7-443a-9b34-0f4137d526e0-kube-api-access-gjv86\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.830781 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-public-tls-certs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.830804 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2efad855-47a7-443a-9b34-0f4137d526e0-logs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.830851 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-combined-ca-bundle\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: W1202 17:03:45.864768 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e213849_808c_4275_971d_daf77f6ea6f3.slice/crio-3f5b6a82e4dc37df0d860de7ee1be051db71335e2869be77cc775a995c9c1af9 WatchSource:0}: Error finding container 3f5b6a82e4dc37df0d860de7ee1be051db71335e2869be77cc775a995c9c1af9: Status 404 returned error can't find the container with id 3f5b6a82e4dc37df0d860de7ee1be051db71335e2869be77cc775a995c9c1af9 Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.932490 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-config-data\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.932590 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-internal-tls-certs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.932630 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-gjv86\" (UniqueName: \"kubernetes.io/projected/2efad855-47a7-443a-9b34-0f4137d526e0-kube-api-access-gjv86\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.932659 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-public-tls-certs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.932686 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2efad855-47a7-443a-9b34-0f4137d526e0-logs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.932746 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-combined-ca-bundle\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.932824 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-scripts\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.934435 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2efad855-47a7-443a-9b34-0f4137d526e0-logs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.937478 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-scripts\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.938139 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-public-tls-certs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.938574 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-internal-tls-certs\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.940074 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-combined-ca-bundle\") pod 
\"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.941827 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2efad855-47a7-443a-9b34-0f4137d526e0-config-data\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:45 crc kubenswrapper[4747]: I1202 17:03:45.954922 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjv86\" (UniqueName: \"kubernetes.io/projected/2efad855-47a7-443a-9b34-0f4137d526e0-kube-api-access-gjv86\") pod \"placement-6b8f479dd4-zx826\" (UID: \"2efad855-47a7-443a-9b34-0f4137d526e0\") " pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.013565 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f469b9bd4-z45tg"] Dec 02 17:03:46 crc kubenswrapper[4747]: W1202 17:03:46.029509 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd055ad7e_75e6_4ce7_ac6b_c966057f8ace.slice/crio-c540ef0b8bed9a6a8f48f394c68393a23d52e0dfc7cee1248c8934affa9077cc WatchSource:0}: Error finding container c540ef0b8bed9a6a8f48f394c68393a23d52e0dfc7cee1248c8934affa9077cc: Status 404 returned error can't find the container with id c540ef0b8bed9a6a8f48f394c68393a23d52e0dfc7cee1248c8934affa9077cc Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.098422 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.608939 4747 generic.go:334] "Generic (PLEG): container finished" podID="7e213849-808c-4275-971d-daf77f6ea6f3" containerID="002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c" exitCode=0 Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.609783 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" event={"ID":"7e213849-808c-4275-971d-daf77f6ea6f3","Type":"ContainerDied","Data":"002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c"} Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.609821 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" event={"ID":"7e213849-808c-4275-971d-daf77f6ea6f3","Type":"ContainerStarted","Data":"3f5b6a82e4dc37df0d860de7ee1be051db71335e2869be77cc775a995c9c1af9"} Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.625940 4747 generic.go:334] "Generic (PLEG): container finished" podID="61d3d063-44d5-43ee-9b9e-a4560c33f775" containerID="3e8ff609f25fdea982bb3091ca88896f7e8d46afd4c12cc4421b3ab74c34c3cc" exitCode=0 Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.626054 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9gbx8" event={"ID":"61d3d063-44d5-43ee-9b9e-a4560c33f775","Type":"ContainerDied","Data":"3e8ff609f25fdea982bb3091ca88896f7e8d46afd4c12cc4421b3ab74c34c3cc"} Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.634061 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f469b9bd4-z45tg" event={"ID":"d055ad7e-75e6-4ce7-ac6b-c966057f8ace","Type":"ContainerStarted","Data":"b837a0ad3161a36cf803b3662cb42735f758d0f967d8cf64e6c6aee19007a45b"} Dec 02 17:03:46 crc 
kubenswrapper[4747]: I1202 17:03:46.634127 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f469b9bd4-z45tg" event={"ID":"d055ad7e-75e6-4ce7-ac6b-c966057f8ace","Type":"ContainerStarted","Data":"c540ef0b8bed9a6a8f48f394c68393a23d52e0dfc7cee1248c8934affa9077cc"} Dec 02 17:03:46 crc kubenswrapper[4747]: I1202 17:03:46.719622 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6b8f479dd4-zx826"] Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.503694 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7b56d86469-kh76t"] Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.506249 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.513469 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7b56d86469-kh76t"] Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.517234 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.517965 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.566510 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-ovndb-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.566580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-config\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.566621 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-public-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.566702 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-internal-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.567110 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-httpd-config\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.567199 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-combined-ca-bundle\") pod 
\"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.567259 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf9tk\" (UniqueName: \"kubernetes.io/projected/8c47782d-dc20-451f-ac66-8555693b819f-kube-api-access-mf9tk\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.649603 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b8f479dd4-zx826" event={"ID":"2efad855-47a7-443a-9b34-0f4137d526e0","Type":"ContainerStarted","Data":"9b0a8042e4eea7aa340b5f872b144e3dd154e6a7642628560976a4d771c969e3"} Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.649692 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b8f479dd4-zx826" event={"ID":"2efad855-47a7-443a-9b34-0f4137d526e0","Type":"ContainerStarted","Data":"10016caa9397626baac6540ab510452e52bcfe4fdfbab21b6afe4f8eacd03d90"} Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.649707 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6b8f479dd4-zx826" event={"ID":"2efad855-47a7-443a-9b34-0f4137d526e0","Type":"ContainerStarted","Data":"ac31c8ced5fc99237b9206df65349802d5c820b5e5e2931dc111fe2dd1943a28"} Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.651351 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.651394 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6b8f479dd4-zx826" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.669185 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-ovndb-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.669567 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-config\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.669619 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-public-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.669676 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-internal-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.669794 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-httpd-config\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.669825 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-combined-ca-bundle\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.669878 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf9tk\" (UniqueName: \"kubernetes.io/projected/8c47782d-dc20-451f-ac66-8555693b819f-kube-api-access-mf9tk\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.684676 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f469b9bd4-z45tg" event={"ID":"d055ad7e-75e6-4ce7-ac6b-c966057f8ace","Type":"ContainerStarted","Data":"f6caf9ad107a550ea6207e6149e40be0d2d3ad5470664cdee598e2e0be45e0a7"} Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.685545 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.690523 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-config\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.691595 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-public-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.691702 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-httpd-config\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.797527 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6b8f479dd4-zx826" podStartSLOduration=2.797496594 podStartE2EDuration="2.797496594s" podCreationTimestamp="2025-12-02 17:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:47.685058439 +0000 UTC m=+1258.211947188" watchObservedRunningTime="2025-12-02 17:03:47.797496594 +0000 UTC m=+1258.324385333" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.798142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-internal-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 
17:03:47.843454 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-ovndb-tls-certs\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.844025 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c47782d-dc20-451f-ac66-8555693b819f-combined-ca-bundle\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.852889 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" event={"ID":"7e213849-808c-4275-971d-daf77f6ea6f3","Type":"ContainerStarted","Data":"09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e"} Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.852951 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.878002 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" podStartSLOduration=3.877983037 podStartE2EDuration="3.877983037s" podCreationTimestamp="2025-12-02 17:03:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:47.876049852 +0000 UTC m=+1258.402938601" watchObservedRunningTime="2025-12-02 17:03:47.877983037 +0000 UTC m=+1258.404871786" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.881323 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7f469b9bd4-z45tg" podStartSLOduration=3.881307962 podStartE2EDuration="3.881307962s" podCreationTimestamp="2025-12-02 17:03:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:47.838187183 +0000 UTC m=+1258.365076102" watchObservedRunningTime="2025-12-02 17:03:47.881307962 +0000 UTC m=+1258.408196711" Dec 02 17:03:47 crc kubenswrapper[4747]: I1202 17:03:47.906868 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf9tk\" (UniqueName: \"kubernetes.io/projected/8c47782d-dc20-451f-ac66-8555693b819f-kube-api-access-mf9tk\") pod \"neutron-7b56d86469-kh76t\" (UID: \"8c47782d-dc20-451f-ac66-8555693b819f\") " pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:48 crc kubenswrapper[4747]: I1202 17:03:48.144922 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.249341 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.250068 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.305518 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.307603 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.881968 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.882043 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.950071 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.950118 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:49 crc kubenswrapper[4747]: I1202 17:03:49.985568 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:50 crc kubenswrapper[4747]: I1202 17:03:50.006171 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:50 crc kubenswrapper[4747]: I1202 17:03:50.890181 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:50 crc kubenswrapper[4747]: I1202 17:03:50.891798 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:51 crc kubenswrapper[4747]: I1202 17:03:51.898772 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 17:03:51 crc kubenswrapper[4747]: I1202 17:03:51.899133 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 17:03:52 crc kubenswrapper[4747]: I1202 17:03:52.030175 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 17:03:52 crc kubenswrapper[4747]: I1202 17:03:52.084675 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 17:03:52 crc kubenswrapper[4747]: I1202 17:03:52.907571 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 17:03:52 crc kubenswrapper[4747]: I1202 17:03:52.907960 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.339435 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.449571 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-credential-keys\") pod \"61d3d063-44d5-43ee-9b9e-a4560c33f775\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.449660 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-fernet-keys\") pod \"61d3d063-44d5-43ee-9b9e-a4560c33f775\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.453711 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-config-data\") pod \"61d3d063-44d5-43ee-9b9e-a4560c33f775\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.453779 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-combined-ca-bundle\") pod \"61d3d063-44d5-43ee-9b9e-a4560c33f775\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.453849 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-scripts\") pod \"61d3d063-44d5-43ee-9b9e-a4560c33f775\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.453955 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r8n8\" (UniqueName: \"kubernetes.io/projected/61d3d063-44d5-43ee-9b9e-a4560c33f775-kube-api-access-6r8n8\") pod \"61d3d063-44d5-43ee-9b9e-a4560c33f775\" (UID: \"61d3d063-44d5-43ee-9b9e-a4560c33f775\") " Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.460747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "61d3d063-44d5-43ee-9b9e-a4560c33f775" (UID: "61d3d063-44d5-43ee-9b9e-a4560c33f775"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.460861 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "61d3d063-44d5-43ee-9b9e-a4560c33f775" (UID: "61d3d063-44d5-43ee-9b9e-a4560c33f775"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.474318 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d3d063-44d5-43ee-9b9e-a4560c33f775-kube-api-access-6r8n8" (OuterVolumeSpecName: "kube-api-access-6r8n8") pod "61d3d063-44d5-43ee-9b9e-a4560c33f775" (UID: "61d3d063-44d5-43ee-9b9e-a4560c33f775"). InnerVolumeSpecName "kube-api-access-6r8n8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.476171 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-scripts" (OuterVolumeSpecName: "scripts") pod "61d3d063-44d5-43ee-9b9e-a4560c33f775" (UID: "61d3d063-44d5-43ee-9b9e-a4560c33f775"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.497362 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.500310 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.505260 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61d3d063-44d5-43ee-9b9e-a4560c33f775" (UID: "61d3d063-44d5-43ee-9b9e-a4560c33f775"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.525131 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-config-data" (OuterVolumeSpecName: "config-data") pod "61d3d063-44d5-43ee-9b9e-a4560c33f775" (UID: "61d3d063-44d5-43ee-9b9e-a4560c33f775"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.560143 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.560186 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.560197 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.560208 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r8n8\" (UniqueName: \"kubernetes.io/projected/61d3d063-44d5-43ee-9b9e-a4560c33f775-kube-api-access-6r8n8\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.560217 4747 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.560225 4747 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61d3d063-44d5-43ee-9b9e-a4560c33f775-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.886157 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7b56d86469-kh76t"] Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.926355 4747 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-9gbx8" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.926627 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-9gbx8" event={"ID":"61d3d063-44d5-43ee-9b9e-a4560c33f775","Type":"ContainerDied","Data":"90fc76262561a49acb439dcf758fe6322641228da3a771e6fbbc0b34ccde5160"} Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.926683 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90fc76262561a49acb439dcf758fe6322641228da3a771e6fbbc0b34ccde5160" Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.931226 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerStarted","Data":"47afa13ede895aa4912bf6a9039ad232fe30afdb00f9a44fbdc816196a3adad2"} Dec 02 17:03:53 crc kubenswrapper[4747]: I1202 17:03:53.934230 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b56d86469-kh76t" event={"ID":"8c47782d-dc20-451f-ac66-8555693b819f","Type":"ContainerStarted","Data":"913b11fcec5e8692369b40c8a87a6d28deb29f5e9c0bf4d5d0b8282a24bc4d68"} Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.315141 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5cc747446d-fcnwd" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.522288 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7d78c5c778-tg55f"] Dec 02 17:03:54 crc kubenswrapper[4747]: E1202 17:03:54.522849 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d3d063-44d5-43ee-9b9e-a4560c33f775" containerName="keystone-bootstrap" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.522864 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d3d063-44d5-43ee-9b9e-a4560c33f775" containerName="keystone-bootstrap" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.525192 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d3d063-44d5-43ee-9b9e-a4560c33f775" containerName="keystone-bootstrap" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.525787 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.531121 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.531385 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6zr9j" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.531620 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.531730 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.531859 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.532052 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.557892 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7d78c5c778-tg55f"] Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.635631 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-fernet-keys\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.638427 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-285k2\" (UniqueName: \"kubernetes.io/projected/687002d8-0be7-43ec-a1aa-61b724aec872-kube-api-access-285k2\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.638597 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-public-tls-certs\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.638654 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-config-data\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.638768 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-combined-ca-bundle\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.638883 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-internal-tls-certs\") pod \"keystone-7d78c5c778-tg55f\" 
(UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.638972 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-scripts\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.639030 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-credential-keys\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.651320 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7c85649748-scrqf" podUID="4ec3f1d7-119b-40fa-b0f4-3d2f353ee162" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740473 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-fernet-keys\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740525 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-285k2\" (UniqueName: \"kubernetes.io/projected/687002d8-0be7-43ec-a1aa-61b724aec872-kube-api-access-285k2\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740572 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-public-tls-certs\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740595 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-config-data\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740638 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-combined-ca-bundle\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740674 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-internal-tls-certs\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " 
pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740709 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-scripts\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.740729 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-credential-keys\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.744923 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-credential-keys\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.745354 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-combined-ca-bundle\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.745566 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-config-data\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.746517 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-public-tls-certs\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.747515 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-scripts\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.748449 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-fernet-keys\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.750448 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/687002d8-0be7-43ec-a1aa-61b724aec872-internal-tls-certs\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.759479 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-285k2\" 
(UniqueName: \"kubernetes.io/projected/687002d8-0be7-43ec-a1aa-61b724aec872-kube-api-access-285k2\") pod \"keystone-7d78c5c778-tg55f\" (UID: \"687002d8-0be7-43ec-a1aa-61b724aec872\") " pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.889586 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.965602 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b56d86469-kh76t" event={"ID":"8c47782d-dc20-451f-ac66-8555693b819f","Type":"ContainerStarted","Data":"8054ad4efe55c263d3fb3508d12b1246bed9dfc84f1913301de4b64eca62d5c8"} Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.965655 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7b56d86469-kh76t" event={"ID":"8c47782d-dc20-451f-ac66-8555693b819f","Type":"ContainerStarted","Data":"dfd71ab6a4ebbeab13e8a1a2ec91d42e84643bd8b21179788235e79056c9b047"} Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.965742 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7b56d86469-kh76t" Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.975238 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bgbtw" event={"ID":"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2","Type":"ContainerStarted","Data":"671a7b7e02a8969d6c8778766b1de862a5c16326e729be03b1c2ff650e8766fe"} Dec 02 17:03:54 crc kubenswrapper[4747]: I1202 17:03:54.989759 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7b56d86469-kh76t" podStartSLOduration=7.9897331430000005 podStartE2EDuration="7.989733143s" podCreationTimestamp="2025-12-02 17:03:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:54.985943785 +0000 UTC m=+1265.512832534" watchObservedRunningTime="2025-12-02 17:03:54.989733143 +0000 UTC m=+1265.516621892" Dec 02 17:03:55 crc kubenswrapper[4747]: I1202 17:03:55.021694 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-bgbtw" podStartSLOduration=3.34770369 podStartE2EDuration="49.021667941s" podCreationTimestamp="2025-12-02 17:03:06 +0000 UTC" firstStartedPulling="2025-12-02 17:03:07.703111088 +0000 UTC m=+1218.229999827" lastFinishedPulling="2025-12-02 17:03:53.377075329 +0000 UTC m=+1263.903964078" observedRunningTime="2025-12-02 17:03:55.019291224 +0000 UTC m=+1265.546179973" watchObservedRunningTime="2025-12-02 17:03:55.021667941 +0000 UTC m=+1265.548556690" Dec 02 17:03:55 crc kubenswrapper[4747]: I1202 17:03:55.150554 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:03:55 crc kubenswrapper[4747]: I1202 17:03:55.227685 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-smmcv"] Dec 02 17:03:55 crc kubenswrapper[4747]: I1202 17:03:55.229364 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerName="dnsmasq-dns" containerID="cri-o://3f2032b8822d8aa7ab6105e46aa7a4ff3d59c810620a7fbe678fe1e7252aa2f9" gracePeriod=10 Dec 02 17:03:55 crc kubenswrapper[4747]: I1202 17:03:55.423730 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-7d78c5c778-tg55f"] Dec 02 17:03:55 crc kubenswrapper[4747]: I1202 17:03:55.992088 4747 generic.go:334] "Generic (PLEG): container finished" podID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerID="3f2032b8822d8aa7ab6105e46aa7a4ff3d59c810620a7fbe678fe1e7252aa2f9" exitCode=0 Dec 02 17:03:55 crc kubenswrapper[4747]: I1202 17:03:55.992179 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" event={"ID":"34b06f22-5fb1-465e-b20f-27a0cfb5bb35","Type":"ContainerDied","Data":"3f2032b8822d8aa7ab6105e46aa7a4ff3d59c810620a7fbe678fe1e7252aa2f9"} Dec 02 17:03:56 crc kubenswrapper[4747]: I1202 17:03:56.042510 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.141:5353: connect: connection refused" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.038617 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7d78c5c778-tg55f" event={"ID":"687002d8-0be7-43ec-a1aa-61b724aec872","Type":"ContainerStarted","Data":"b8f078b34c2f0ab2e0175e1920e61a5e87d4ca7b934f3d2b4db72cd8ba370698"} Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.430291 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.528138 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-svc\") pod \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.528250 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-config\") pod \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.528362 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-nb\") pod \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.528517 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-sb\") pod \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.528612 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q5j7\" (UniqueName: \"kubernetes.io/projected/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-kube-api-access-6q5j7\") pod \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") " Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.528640 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-swift-storage-0\") pod \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\" (UID: \"34b06f22-5fb1-465e-b20f-27a0cfb5bb35\") 
" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.560113 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-kube-api-access-6q5j7" (OuterVolumeSpecName: "kube-api-access-6q5j7") pod "34b06f22-5fb1-465e-b20f-27a0cfb5bb35" (UID: "34b06f22-5fb1-465e-b20f-27a0cfb5bb35"). InnerVolumeSpecName "kube-api-access-6q5j7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.581574 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "34b06f22-5fb1-465e-b20f-27a0cfb5bb35" (UID: "34b06f22-5fb1-465e-b20f-27a0cfb5bb35"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.614462 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "34b06f22-5fb1-465e-b20f-27a0cfb5bb35" (UID: "34b06f22-5fb1-465e-b20f-27a0cfb5bb35"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.614898 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "34b06f22-5fb1-465e-b20f-27a0cfb5bb35" (UID: "34b06f22-5fb1-465e-b20f-27a0cfb5bb35"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.620431 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "34b06f22-5fb1-465e-b20f-27a0cfb5bb35" (UID: "34b06f22-5fb1-465e-b20f-27a0cfb5bb35"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.631667 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.631714 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q5j7\" (UniqueName: \"kubernetes.io/projected/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-kube-api-access-6q5j7\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.631730 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.631742 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.631753 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.653608 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-config" (OuterVolumeSpecName: "config") pod "34b06f22-5fb1-465e-b20f-27a0cfb5bb35" (UID: "34b06f22-5fb1-465e-b20f-27a0cfb5bb35"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:03:57 crc kubenswrapper[4747]: I1202 17:03:57.733514 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34b06f22-5fb1-465e-b20f-27a0cfb5bb35-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.061584 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" event={"ID":"34b06f22-5fb1-465e-b20f-27a0cfb5bb35","Type":"ContainerDied","Data":"d4613a0f943250ad0b29ef507e18fbfd04bd79a64ad55821d0f84e59d8185c27"} Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.062768 4747 scope.go:117] "RemoveContainer" containerID="3f2032b8822d8aa7ab6105e46aa7a4ff3d59c810620a7fbe678fe1e7252aa2f9" Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.061944 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-smmcv" Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.072493 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6f4g4" event={"ID":"b3d0edb2-cca7-4f61-bf72-c3fddb909fea","Type":"ContainerStarted","Data":"63e459cef3c1eb70f665dfb15828b42966bc3b519f61f757c46be948d23a88e0"} Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.086979 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7d78c5c778-tg55f" event={"ID":"687002d8-0be7-43ec-a1aa-61b724aec872","Type":"ContainerStarted","Data":"c9ff1a39a3cffd421848829b211fa665e33bb9f8d42c5d784662c4a4783729f6"} Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.088022 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.113955 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-6f4g4" podStartSLOduration=3.808155649 podStartE2EDuration="53.113934043s" podCreationTimestamp="2025-12-02 17:03:05 +0000 UTC" firstStartedPulling="2025-12-02 17:03:07.702775498 +0000 UTC m=+1218.229664257" lastFinishedPulling="2025-12-02 17:03:57.008553902 +0000 UTC m=+1267.535442651" observedRunningTime="2025-12-02 17:03:58.110087413 +0000 UTC m=+1268.636976162" watchObservedRunningTime="2025-12-02 17:03:58.113934043 +0000 UTC m=+1268.640822792" Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.160251 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-smmcv"] Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.174593 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-smmcv"] Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.182377 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7d78c5c778-tg55f" podStartSLOduration=4.182351839 podStartE2EDuration="4.182351839s" podCreationTimestamp="2025-12-02 17:03:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:03:58.161872057 +0000 UTC m=+1268.688760806" watchObservedRunningTime="2025-12-02 17:03:58.182351839 +0000 UTC m=+1268.709240588" Dec 02 17:03:58 crc kubenswrapper[4747]: I1202 17:03:58.190927 4747 scope.go:117] "RemoveContainer" containerID="f6e3f72a0b874effb25955ebc96e8be89c98dc92cdea4cae52a625c63154b3c3" Dec 02 17:03:59 crc kubenswrapper[4747]: I1202 17:03:59.772469 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" path="/var/lib/kubelet/pods/34b06f22-5fb1-465e-b20f-27a0cfb5bb35/volumes" Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.135276 4747 generic.go:334] "Generic (PLEG): container finished" podID="3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" containerID="671a7b7e02a8969d6c8778766b1de862a5c16326e729be03b1c2ff650e8766fe" exitCode=0 Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.135352 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bgbtw" event={"ID":"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2","Type":"ContainerDied","Data":"671a7b7e02a8969d6c8778766b1de862a5c16326e729be03b1c2ff650e8766fe"} Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.137020 4747 generic.go:334] "Generic (PLEG): container finished" podID="b3d0edb2-cca7-4f61-bf72-c3fddb909fea" 
containerID="63e459cef3c1eb70f665dfb15828b42966bc3b519f61f757c46be948d23a88e0" exitCode=0 Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.137048 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6f4g4" event={"ID":"b3d0edb2-cca7-4f61-bf72-c3fddb909fea","Type":"ContainerDied","Data":"63e459cef3c1eb70f665dfb15828b42966bc3b519f61f757c46be948d23a88e0"} Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.794997 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.795093 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.795161 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.796259 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3bcd3880ed49ebcd50724927f63a47903d028c91da930fb1b60778e7033a6140"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:04:01 crc kubenswrapper[4747]: I1202 17:04:01.796371 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://3bcd3880ed49ebcd50724927f63a47903d028c91da930fb1b60778e7033a6140" gracePeriod=600 Dec 02 17:04:02 crc kubenswrapper[4747]: I1202 17:04:02.220097 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="3bcd3880ed49ebcd50724927f63a47903d028c91da930fb1b60778e7033a6140" exitCode=0 Dec 02 17:04:02 crc kubenswrapper[4747]: I1202 17:04:02.220168 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"3bcd3880ed49ebcd50724927f63a47903d028c91da930fb1b60778e7033a6140"} Dec 02 17:04:02 crc kubenswrapper[4747]: I1202 17:04:02.220241 4747 scope.go:117] "RemoveContainer" containerID="dd912523cc5101dd05f8356cae810078de73ddb22c7e5af9901013b7a8b2bc0c" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.661923 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.665921 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752092 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-db-sync-config-data\") pod \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752146 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-combined-ca-bundle\") pod \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752176 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bfs7\" (UniqueName: \"kubernetes.io/projected/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-kube-api-access-5bfs7\") pod \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752226 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-etc-machine-id\") pod \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752261 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-db-sync-config-data\") pod \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752304 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-combined-ca-bundle\") pod \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\" (UID: \"b3d0edb2-cca7-4f61-bf72-c3fddb909fea\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752332 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6d62m\" (UniqueName: \"kubernetes.io/projected/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-kube-api-access-6d62m\") pod \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752357 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-scripts\") pod \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752408 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-config-data\") pod \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\" (UID: \"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2\") " Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.752787 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod 
"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" (UID: "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.753042 4747 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.758574 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-scripts" (OuterVolumeSpecName: "scripts") pod "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" (UID: "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.759533 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b3d0edb2-cca7-4f61-bf72-c3fddb909fea" (UID: "b3d0edb2-cca7-4f61-bf72-c3fddb909fea"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.764139 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-kube-api-access-5bfs7" (OuterVolumeSpecName: "kube-api-access-5bfs7") pod "b3d0edb2-cca7-4f61-bf72-c3fddb909fea" (UID: "b3d0edb2-cca7-4f61-bf72-c3fddb909fea"). InnerVolumeSpecName "kube-api-access-5bfs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.764285 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-kube-api-access-6d62m" (OuterVolumeSpecName: "kube-api-access-6d62m") pod "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" (UID: "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2"). InnerVolumeSpecName "kube-api-access-6d62m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.772018 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" (UID: "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.791059 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" (UID: "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.796063 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3d0edb2-cca7-4f61-bf72-c3fddb909fea" (UID: "b3d0edb2-cca7-4f61-bf72-c3fddb909fea"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.828579 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-config-data" (OuterVolumeSpecName: "config-data") pod "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" (UID: "3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861050 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861096 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861112 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861126 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bfs7\" (UniqueName: \"kubernetes.io/projected/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-kube-api-access-5bfs7\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861142 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861154 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3d0edb2-cca7-4f61-bf72-c3fddb909fea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861165 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6d62m\" (UniqueName: \"kubernetes.io/projected/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-kube-api-access-6d62m\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:03 crc kubenswrapper[4747]: I1202 17:04:03.861178 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.248326 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-bgbtw" event={"ID":"3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2","Type":"ContainerDied","Data":"f1ac92c63c80f3857465dea8d6aa661fa7e6dd542affc35e420320dc47a08b96"} Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.248639 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1ac92c63c80f3857465dea8d6aa661fa7e6dd542affc35e420320dc47a08b96" Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.248353 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-bgbtw" Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.249482 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6f4g4" event={"ID":"b3d0edb2-cca7-4f61-bf72-c3fddb909fea","Type":"ContainerDied","Data":"e49fb550ebf3a439175c6a6ca3615bbd087756f3c2871cbb46a640fe9e1ec76a"} Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.249530 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e49fb550ebf3a439175c6a6ca3615bbd087756f3c2871cbb46a640fe9e1ec76a" Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.249587 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6f4g4" Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.315076 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5cc747446d-fcnwd" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 02 17:04:04 crc kubenswrapper[4747]: I1202 17:04:04.639667 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7c85649748-scrqf" podUID="4ec3f1d7-119b-40fa-b0f4-3d2f353ee162" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.008891 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:05 crc kubenswrapper[4747]: E1202 17:04:05.009779 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerName="init" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.009803 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerName="init" Dec 02 17:04:05 crc kubenswrapper[4747]: E1202 17:04:05.009826 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" containerName="cinder-db-sync" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.009836 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" containerName="cinder-db-sync" Dec 02 17:04:05 crc kubenswrapper[4747]: E1202 17:04:05.009851 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3d0edb2-cca7-4f61-bf72-c3fddb909fea" containerName="barbican-db-sync" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.009859 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3d0edb2-cca7-4f61-bf72-c3fddb909fea" containerName="barbican-db-sync" Dec 02 17:04:05 crc kubenswrapper[4747]: E1202 17:04:05.009881 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerName="dnsmasq-dns" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.009887 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerName="dnsmasq-dns" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.010100 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="34b06f22-5fb1-465e-b20f-27a0cfb5bb35" containerName="dnsmasq-dns" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.010120 4747 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" containerName="cinder-db-sync" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.010142 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3d0edb2-cca7-4f61-bf72-c3fddb909fea" containerName="barbican-db-sync" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.011526 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.021648 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-jzp2t" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.021973 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.022135 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.022841 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.037137 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7c69975b48-9pn7p"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.050204 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.056523 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qjkbj" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.056807 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.057011 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.065246 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.086987 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7c69975b48-9pn7p"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091112 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091181 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6s5n\" (UniqueName: \"kubernetes.io/projected/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-kube-api-access-k6s5n\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091237 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " 
pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091330 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-config-data\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091368 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091424 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-combined-ca-bundle\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091464 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv74j\" (UniqueName: \"kubernetes.io/projected/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-kube-api-access-fv74j\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091506 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-config-data-custom\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091555 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091581 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-logs\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.091631 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-scripts\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.133990 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-57f999f495-w8tt5"] Dec 02 17:04:05 crc 
kubenswrapper[4747]: I1202 17:04:05.136000 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.147292 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.156967 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-57f999f495-w8tt5"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.194970 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-combined-ca-bundle\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195237 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65391885-7b23-49fc-b435-ea21781c2f4d-logs\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195312 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv74j\" (UniqueName: \"kubernetes.io/projected/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-kube-api-access-fv74j\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195420 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnmhj\" (UniqueName: \"kubernetes.io/projected/65391885-7b23-49fc-b435-ea21781c2f4d-kube-api-access-jnmhj\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195517 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-config-data-custom\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195586 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-combined-ca-bundle\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195663 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195743 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-logs\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195832 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-scripts\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195922 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.195993 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6s5n\" (UniqueName: \"kubernetes.io/projected/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-kube-api-access-k6s5n\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.196066 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-config-data-custom\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.196140 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.196209 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-config-data\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.196313 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-config-data\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.196381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.203698 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d68b9cb4c-dsjg9"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.205275 
4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.206799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-logs\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.207392 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.207851 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.218686 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-combined-ca-bundle\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.219481 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.221165 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-config-data\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.221561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-scripts\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.226511 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-config-data-custom\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.241442 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv74j\" (UniqueName: \"kubernetes.io/projected/cf9be744-ba2f-43f6-bb0c-ab806681aeb2-kube-api-access-fv74j\") pod \"barbican-keystone-listener-7c69975b48-9pn7p\" (UID: \"cf9be744-ba2f-43f6-bb0c-ab806681aeb2\") " 
pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.248573 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.266600 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6s5n\" (UniqueName: \"kubernetes.io/projected/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-kube-api-access-k6s5n\") pod \"cinder-scheduler-0\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.266676 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d68b9cb4c-dsjg9"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299181 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-swift-storage-0\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299232 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-config-data-custom\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299258 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-config-data\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299275 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-sb\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299292 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-svc\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299322 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-config\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299384 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9xxc\" 
(UniqueName: \"kubernetes.io/projected/797476c5-9530-40d5-87cb-58125f967fe6-kube-api-access-l9xxc\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299400 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65391885-7b23-49fc-b435-ea21781c2f4d-logs\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299420 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnmhj\" (UniqueName: \"kubernetes.io/projected/65391885-7b23-49fc-b435-ea21781c2f4d-kube-api-access-jnmhj\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299447 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-nb\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.299472 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-combined-ca-bundle\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.307692 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"a9cd5048f2dad4e3a491049097b9d8740c67cd00c6933cd94235f0d88bac7953"} Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.309685 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65391885-7b23-49fc-b435-ea21781c2f4d-logs\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.317063 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-combined-ca-bundle\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.317233 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-config-data\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.317718 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/65391885-7b23-49fc-b435-ea21781c2f4d-config-data-custom\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.319828 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerStarted","Data":"1b04ae22b9c96e3ac7f64d88ed4695cc04c49db66143b17d7b7c24d4dba90371"} Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.320030 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-central-agent" containerID="cri-o://d7b83f13de7d9765e6fa532e061fb77c43f20f6c68ce8ba5959efba12a03f098" gracePeriod=30 Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.320337 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.320397 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="proxy-httpd" containerID="cri-o://1b04ae22b9c96e3ac7f64d88ed4695cc04c49db66143b17d7b7c24d4dba90371" gracePeriod=30 Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.320446 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="sg-core" containerID="cri-o://47afa13ede895aa4912bf6a9039ad232fe30afdb00f9a44fbdc816196a3adad2" gracePeriod=30 Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.320477 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-notification-agent" containerID="cri-o://4b3de528fe544733b59a27fd0ede0471796837f58ac4df3491c6fe0b7a5190eb" gracePeriod=30 Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.389765 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.392427 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnmhj\" (UniqueName: \"kubernetes.io/projected/65391885-7b23-49fc-b435-ea21781c2f4d-kube-api-access-jnmhj\") pod \"barbican-worker-57f999f495-w8tt5\" (UID: \"65391885-7b23-49fc-b435-ea21781c2f4d\") " pod="openstack/barbican-worker-57f999f495-w8tt5" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.397295 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.398536 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d68b9cb4c-dsjg9"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.401224 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9xxc\" (UniqueName: \"kubernetes.io/projected/797476c5-9530-40d5-87cb-58125f967fe6-kube-api-access-l9xxc\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.401287 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-nb\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.401371 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-swift-storage-0\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.401404 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-svc\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.401426 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-sb\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.401469 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-config\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.402691 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-config\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.403099 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-nb\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.403304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-swift-storage-0\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.403792 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-sb\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.413137 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-svc\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.447980 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6b66855878-jbxkj"] Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.453594 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b66855878-jbxkj" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.461401 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.463369 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9xxc\" (UniqueName: \"kubernetes.io/projected/797476c5-9530-40d5-87cb-58125f967fe6-kube-api-access-l9xxc\") pod \"dnsmasq-dns-d68b9cb4c-dsjg9\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") " pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.480972 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-57f999f495-w8tt5"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.485997 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b66855878-jbxkj"]
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.505297 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-logs\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.505373 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.505405 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf49h\" (UniqueName: \"kubernetes.io/projected/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-kube-api-access-zf49h\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.505510 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-combined-ca-bundle\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.505544 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data-custom\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.505666 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-lqspp"]
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.507237 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.553028 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-lqspp"]
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.570774 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.337045836 podStartE2EDuration="1m0.570750066s" podCreationTimestamp="2025-12-02 17:03:05 +0000 UTC" firstStartedPulling="2025-12-02 17:03:07.071541859 +0000 UTC m=+1217.598430608" lastFinishedPulling="2025-12-02 17:04:04.305246099 +0000 UTC m=+1274.832134838" observedRunningTime="2025-12-02 17:04:05.435274972 +0000 UTC m=+1275.962163721" watchObservedRunningTime="2025-12-02 17:04:05.570750066 +0000 UTC m=+1276.097638815"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.591258 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.614078 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.615486 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.618284 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.621880 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-logs\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641119 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641276 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641317 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf49h\" (UniqueName: \"kubernetes.io/projected/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-kube-api-access-zf49h\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641388 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641482 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbf5q\" (UniqueName: \"kubernetes.io/projected/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-kube-api-access-rbf5q\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641534 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-svc\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-config\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641813 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-combined-ca-bundle\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641882 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data-custom\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.641937 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.636197 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-logs\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.678664 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-combined-ca-bundle\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.715216 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.723574 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf49h\" (UniqueName: \"kubernetes.io/projected/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-kube-api-access-zf49h\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.727989 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.728727 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data-custom\") pod \"barbican-api-6b66855878-jbxkj\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749111 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-config\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749249 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6430c7e8-1065-422d-b512-28f3c2356d6a-logs\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749279 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749327 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data-custom\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749354 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6430c7e8-1065-422d-b512-28f3c2356d6a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749376 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-scripts\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk6zf\" (UniqueName: \"kubernetes.io/projected/6430c7e8-1065-422d-b512-28f3c2356d6a-kube-api-access-tk6zf\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749491 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749515 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749537 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749578 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbf5q\" (UniqueName: \"kubernetes.io/projected/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-kube-api-access-rbf5q\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.749607 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-svc\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.750772 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-svc\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.750779 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-config\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.751667 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.751812 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.797649 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbf5q\" (UniqueName: \"kubernetes.io/projected/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-kube-api-access-rbf5q\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.800238 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-lqspp\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") " pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.874204 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6430c7e8-1065-422d-b512-28f3c2356d6a-logs\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.874495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data-custom\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.874568 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6430c7e8-1065-422d-b512-28f3c2356d6a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.874599 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-scripts\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.874678 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk6zf\" (UniqueName: \"kubernetes.io/projected/6430c7e8-1065-422d-b512-28f3c2356d6a-kube-api-access-tk6zf\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.874803 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.874829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.875691 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6430c7e8-1065-422d-b512-28f3c2356d6a-logs\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.876179 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6430c7e8-1065-422d-b512-28f3c2356d6a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.879882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-scripts\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.881186 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.902311 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data-custom\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.902834 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk6zf\" (UniqueName: \"kubernetes.io/projected/6430c7e8-1065-422d-b512-28f3c2356d6a-kube-api-access-tk6zf\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.909838 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data\") pod \"cinder-api-0\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " pod="openstack/cinder-api-0"
Dec 02 17:04:05 crc kubenswrapper[4747]: I1202 17:04:05.983681 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.034847 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.129596 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.182640 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.396238 4747 generic.go:334] "Generic (PLEG): container finished" podID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerID="47afa13ede895aa4912bf6a9039ad232fe30afdb00f9a44fbdc816196a3adad2" exitCode=2
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.396736 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerDied","Data":"47afa13ede895aa4912bf6a9039ad232fe30afdb00f9a44fbdc816196a3adad2"}
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.417487 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5","Type":"ContainerStarted","Data":"04391d3864ae463d975e738a402ea957cb576502de24e82a4cf1aa24150e0f8c"}
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.460109 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-57f999f495-w8tt5"]
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.630938 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7c69975b48-9pn7p"]
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.659444 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d68b9cb4c-dsjg9"]
Dec 02 17:04:06 crc kubenswrapper[4747]: W1202 17:04:06.681207 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod797476c5_9530_40d5_87cb_58125f967fe6.slice/crio-83adcbe1471381a8a80b16f169859c8f1ca583fc89bdb0ee3aff32cbc4b3ee50 WatchSource:0}: Error finding container 83adcbe1471381a8a80b16f169859c8f1ca583fc89bdb0ee3aff32cbc4b3ee50: Status 404 returned error can't find the container with id 83adcbe1471381a8a80b16f169859c8f1ca583fc89bdb0ee3aff32cbc4b3ee50
Dec 02 17:04:06 crc kubenswrapper[4747]: I1202 17:04:06.884957 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b66855878-jbxkj"]
Dec 02 17:04:06 crc kubenswrapper[4747]: W1202 17:04:06.936536 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a3e75c6_fbd8_4c12_9d38_8c9684b13d15.slice/crio-54a18bcb0789214cdc0e007c46311015b0b9c90d3af8d9fc418983059ec3f33e WatchSource:0}: Error finding container 54a18bcb0789214cdc0e007c46311015b0b9c90d3af8d9fc418983059ec3f33e: Status 404 returned error can't find the container with id 54a18bcb0789214cdc0e007c46311015b0b9c90d3af8d9fc418983059ec3f33e
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.018572 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.039633 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-lqspp"]
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.440142 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" event={"ID":"cf9be744-ba2f-43f6-bb0c-ab806681aeb2","Type":"ContainerStarted","Data":"01a63faee25e8b5f2887553f2892405ecb51ea2e8b77e1e50f35bdd146e2be4a"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.448061 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6430c7e8-1065-422d-b512-28f3c2356d6a","Type":"ContainerStarted","Data":"c1ca1daf1437fa31d806279db8a08149602ca29a92914565f9793bdab4d274a7"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.454029 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b66855878-jbxkj" event={"ID":"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15","Type":"ContainerStarted","Data":"54a18bcb0789214cdc0e007c46311015b0b9c90d3af8d9fc418983059ec3f33e"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.461064 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" event={"ID":"2ec58fad-2ebd-4536-ae98-1f4537cbbffd","Type":"ContainerStarted","Data":"8ba8b1277992974abf097659a8db2641d3c021cb0b2365efb0494f183cd31f63"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.470668 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-57f999f495-w8tt5" event={"ID":"65391885-7b23-49fc-b435-ea21781c2f4d","Type":"ContainerStarted","Data":"fcc01f4463c4b6933c76e29083281179f4343b0fe747e5919ebdd1b24ab464bc"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.488654 4747 generic.go:334] "Generic (PLEG): container finished" podID="797476c5-9530-40d5-87cb-58125f967fe6" containerID="ca1359fa71edc945672e5c06b6e312b6693fb0adab75ec122be0038b2965726c" exitCode=0
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.488813 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" event={"ID":"797476c5-9530-40d5-87cb-58125f967fe6","Type":"ContainerDied","Data":"ca1359fa71edc945672e5c06b6e312b6693fb0adab75ec122be0038b2965726c"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.488850 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" event={"ID":"797476c5-9530-40d5-87cb-58125f967fe6","Type":"ContainerStarted","Data":"83adcbe1471381a8a80b16f169859c8f1ca583fc89bdb0ee3aff32cbc4b3ee50"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503220 4747 generic.go:334] "Generic (PLEG): container finished" podID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerID="1b04ae22b9c96e3ac7f64d88ed4695cc04c49db66143b17d7b7c24d4dba90371" exitCode=0
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503261 4747 generic.go:334] "Generic (PLEG): container finished" podID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerID="4b3de528fe544733b59a27fd0ede0471796837f58ac4df3491c6fe0b7a5190eb" exitCode=0
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503270 4747 generic.go:334] "Generic (PLEG): container finished" podID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerID="d7b83f13de7d9765e6fa532e061fb77c43f20f6c68ce8ba5959efba12a03f098" exitCode=0
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503317 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerDied","Data":"1b04ae22b9c96e3ac7f64d88ed4695cc04c49db66143b17d7b7c24d4dba90371"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503399 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerDied","Data":"4b3de528fe544733b59a27fd0ede0471796837f58ac4df3491c6fe0b7a5190eb"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerDied","Data":"d7b83f13de7d9765e6fa532e061fb77c43f20f6c68ce8ba5959efba12a03f098"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503432 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"210762bb-c409-4ed0-af37-4e70f2c7c955","Type":"ContainerDied","Data":"5e755634c093ed580c885ba8f2134a4fc62444ab047cdcc74f093478e332fe66"}
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.503446 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e755634c093ed580c885ba8f2134a4fc62444ab047cdcc74f093478e332fe66"
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.616273 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.723580 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-log-httpd\") pod \"210762bb-c409-4ed0-af37-4e70f2c7c955\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") "
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.724583 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-scripts\") pod \"210762bb-c409-4ed0-af37-4e70f2c7c955\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") "
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.724700 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "210762bb-c409-4ed0-af37-4e70f2c7c955" (UID: "210762bb-c409-4ed0-af37-4e70f2c7c955"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.724889 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-combined-ca-bundle\") pod \"210762bb-c409-4ed0-af37-4e70f2c7c955\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") "
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.725240 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-config-data\") pod \"210762bb-c409-4ed0-af37-4e70f2c7c955\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") "
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.725309 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-sg-core-conf-yaml\") pod \"210762bb-c409-4ed0-af37-4e70f2c7c955\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") "
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.725340 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-run-httpd\") pod \"210762bb-c409-4ed0-af37-4e70f2c7c955\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") "
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.725402 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvs2g\" (UniqueName: \"kubernetes.io/projected/210762bb-c409-4ed0-af37-4e70f2c7c955-kube-api-access-kvs2g\") pod \"210762bb-c409-4ed0-af37-4e70f2c7c955\" (UID: \"210762bb-c409-4ed0-af37-4e70f2c7c955\") "
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.728926 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.730980 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "210762bb-c409-4ed0-af37-4e70f2c7c955" (UID: "210762bb-c409-4ed0-af37-4e70f2c7c955"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.735041 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-scripts" (OuterVolumeSpecName: "scripts") pod "210762bb-c409-4ed0-af37-4e70f2c7c955" (UID: "210762bb-c409-4ed0-af37-4e70f2c7c955"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.736134 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210762bb-c409-4ed0-af37-4e70f2c7c955-kube-api-access-kvs2g" (OuterVolumeSpecName: "kube-api-access-kvs2g") pod "210762bb-c409-4ed0-af37-4e70f2c7c955" (UID: "210762bb-c409-4ed0-af37-4e70f2c7c955"). InnerVolumeSpecName "kube-api-access-kvs2g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.830669 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.830920 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/210762bb-c409-4ed0-af37-4e70f2c7c955-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.831115 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvs2g\" (UniqueName: \"kubernetes.io/projected/210762bb-c409-4ed0-af37-4e70f2c7c955-kube-api-access-kvs2g\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.867496 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "210762bb-c409-4ed0-af37-4e70f2c7c955" (UID: "210762bb-c409-4ed0-af37-4e70f2c7c955"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.901540 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "210762bb-c409-4ed0-af37-4e70f2c7c955" (UID: "210762bb-c409-4ed0-af37-4e70f2c7c955"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.933315 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.933358 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:07 crc kubenswrapper[4747]: I1202 17:04:07.972073 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-config-data" (OuterVolumeSpecName: "config-data") pod "210762bb-c409-4ed0-af37-4e70f2c7c955" (UID: "210762bb-c409-4ed0-af37-4e70f2c7c955"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.035266 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/210762bb-c409-4ed0-af37-4e70f2c7c955-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.362091 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.550961 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6430c7e8-1065-422d-b512-28f3c2356d6a","Type":"ContainerStarted","Data":"7cabf4deb964e785602c85cb53ed328f1a2bade12a410a391e6a9ce09fd0465a"}
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.564402 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b66855878-jbxkj" event={"ID":"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15","Type":"ContainerStarted","Data":"bd7cecf7661738a2db4fd91fc0e6317f281a3932dd1a5cc1e928e1b669d66716"}
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.581969 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5","Type":"ContainerStarted","Data":"7b5e91aba3e47ff9c6eff02b4ee9857b921c5898450a7c5caf06adb3d4c51869"}
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.601578 4747 generic.go:334] "Generic (PLEG): container finished" podID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerID="b66aeb84d7ee2e9a28e52933f514956c9094fecaf40f053c2ce02277c8d795ca" exitCode=0
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.601695 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.603089 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" event={"ID":"2ec58fad-2ebd-4536-ae98-1f4537cbbffd","Type":"ContainerDied","Data":"b66aeb84d7ee2e9a28e52933f514956c9094fecaf40f053c2ce02277c8d795ca"}
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.704994 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.726424 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.754731 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:08 crc kubenswrapper[4747]: E1202 17:04:08.757512 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-notification-agent"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.757555 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-notification-agent"
Dec 02 17:04:08 crc kubenswrapper[4747]: E1202 17:04:08.757579 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-central-agent"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.757588 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-central-agent"
Dec 02 17:04:08 crc kubenswrapper[4747]: E1202 17:04:08.757636 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="proxy-httpd"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.757650 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="proxy-httpd"
Dec 02 17:04:08 crc kubenswrapper[4747]: E1202 17:04:08.757670 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="sg-core"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.757678 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="sg-core"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.758308 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="proxy-httpd"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.758347 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-central-agent"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.758357 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="ceilometer-notification-agent"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.758382 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" containerName="sg-core"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.766107 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.787207 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.789017 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.855515 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.872377 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-log-httpd\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.872535 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-config-data\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.872588 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.872635 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-scripts\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.872744 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgfmk\" (UniqueName: \"kubernetes.io/projected/315b434d-0877-4071-9e78-0b5b659edc47-kube-api-access-vgfmk\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.872780 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.872813 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-run-httpd\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.978570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-config-data\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.978860 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.979005 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-scripts\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.979497 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgfmk\" (UniqueName: \"kubernetes.io/projected/315b434d-0877-4071-9e78-0b5b659edc47-kube-api-access-vgfmk\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.979648 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.979794 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-run-httpd\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.979983 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-log-httpd\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.980785 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-log-httpd\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.986889 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-config-data\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.988620 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-run-httpd\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.989011 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.989496 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-scripts\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:08 crc kubenswrapper[4747]: I1202 17:04:08.991761 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.029138 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgfmk\" (UniqueName: \"kubernetes.io/projected/315b434d-0877-4071-9e78-0b5b659edc47-kube-api-access-vgfmk\") pod \"ceilometer-0\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " pod="openstack/ceilometer-0"
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.130332 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.534220 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9"
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.612609 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-nb\") pod \"797476c5-9530-40d5-87cb-58125f967fe6\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") "
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.613114 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-sb\") pod \"797476c5-9530-40d5-87cb-58125f967fe6\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") "
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.613174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-swift-storage-0\") pod \"797476c5-9530-40d5-87cb-58125f967fe6\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") "
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.613290 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-svc\") pod \"797476c5-9530-40d5-87cb-58125f967fe6\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") "
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.613399 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-config\") pod \"797476c5-9530-40d5-87cb-58125f967fe6\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") "
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.613451 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9xxc\" (UniqueName: \"kubernetes.io/projected/797476c5-9530-40d5-87cb-58125f967fe6-kube-api-access-l9xxc\") pod \"797476c5-9530-40d5-87cb-58125f967fe6\" (UID: \"797476c5-9530-40d5-87cb-58125f967fe6\") "
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.660485 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/797476c5-9530-40d5-87cb-58125f967fe6-kube-api-access-l9xxc" (OuterVolumeSpecName: "kube-api-access-l9xxc") pod "797476c5-9530-40d5-87cb-58125f967fe6" (UID: "797476c5-9530-40d5-87cb-58125f967fe6"). InnerVolumeSpecName "kube-api-access-l9xxc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.684798 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9" event={"ID":"797476c5-9530-40d5-87cb-58125f967fe6","Type":"ContainerDied","Data":"83adcbe1471381a8a80b16f169859c8f1ca583fc89bdb0ee3aff32cbc4b3ee50"}
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.684866 4747 scope.go:117] "RemoveContainer" containerID="ca1359fa71edc945672e5c06b6e312b6693fb0adab75ec122be0038b2965726c"
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.685085 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d68b9cb4c-dsjg9"
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.714575 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "797476c5-9530-40d5-87cb-58125f967fe6" (UID: "797476c5-9530-40d5-87cb-58125f967fe6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.716603 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.716634 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9xxc\" (UniqueName: \"kubernetes.io/projected/797476c5-9530-40d5-87cb-58125f967fe6-kube-api-access-l9xxc\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.740835 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "797476c5-9530-40d5-87cb-58125f967fe6" (UID: "797476c5-9530-40d5-87cb-58125f967fe6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.752652 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "797476c5-9530-40d5-87cb-58125f967fe6" (UID: "797476c5-9530-40d5-87cb-58125f967fe6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.799205 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210762bb-c409-4ed0-af37-4e70f2c7c955" path="/var/lib/kubelet/pods/210762bb-c409-4ed0-af37-4e70f2c7c955/volumes"
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.819227 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.819276 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.823478 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-config" (OuterVolumeSpecName: "config") pod "797476c5-9530-40d5-87cb-58125f967fe6" (UID: "797476c5-9530-40d5-87cb-58125f967fe6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.824490 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "797476c5-9530-40d5-87cb-58125f967fe6" (UID: "797476c5-9530-40d5-87cb-58125f967fe6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.926642 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.926687 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/797476c5-9530-40d5-87cb-58125f967fe6-config\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:09 crc kubenswrapper[4747]: I1202 17:04:09.960477 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:09 crc kubenswrapper[4747]: W1202 17:04:09.965708 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod315b434d_0877_4071_9e78_0b5b659edc47.slice/crio-5b321f251c22b5cceba19259b2c91f58425efdf0fbfb3d8503d06df8b8a7f6e7 WatchSource:0}: Error finding container 5b321f251c22b5cceba19259b2c91f58425efdf0fbfb3d8503d06df8b8a7f6e7: Status 404 returned error can't find the container with id 5b321f251c22b5cceba19259b2c91f58425efdf0fbfb3d8503d06df8b8a7f6e7
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.097816 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d68b9cb4c-dsjg9"]
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.114972 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d68b9cb4c-dsjg9"]
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.722094 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" event={"ID":"cf9be744-ba2f-43f6-bb0c-ab806681aeb2","Type":"ContainerStarted","Data":"0ac200bfa09ad245ff3302bbf17533f15d08340482bc8e95afeee944d6370dd5"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.722423 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" event={"ID":"cf9be744-ba2f-43f6-bb0c-ab806681aeb2","Type":"ContainerStarted","Data":"1f84b25927ae4e944eb1f21881d1f403089a95349b2123044945a58302d4c65a"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.735191 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b66855878-jbxkj" event={"ID":"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15","Type":"ContainerStarted","Data":"2aa18e15f149a7e37311e561636a6c25013d4ac84b19719e87849843f4bdb0f5"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.736061 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.736347 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.746417 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7c69975b48-9pn7p" podStartSLOduration=3.007406155 podStartE2EDuration="5.746393185s" podCreationTimestamp="2025-12-02 17:04:05 +0000 UTC" firstStartedPulling="2025-12-02 17:04:06.664172547 +0000 UTC m=+1277.191061296" lastFinishedPulling="2025-12-02 17:04:09.403159587 +0000 UTC m=+1279.930048326" observedRunningTime="2025-12-02 17:04:10.744362147 +0000 UTC m=+1281.271250896" watchObservedRunningTime="2025-12-02 17:04:10.746393185 +0000 UTC m=+1281.273281934"
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.754758 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5","Type":"ContainerStarted","Data":"492b803d1e491b99c76c810cf3c181e7383b6a0c2a0aa1405ab7bcca8fd2d3b7"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.793434 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" event={"ID":"2ec58fad-2ebd-4536-ae98-1f4537cbbffd","Type":"ContainerStarted","Data":"6486edc5f982a0b8b8e8a64c9a7addb1d98d3309aabf7682782ec81ad54cb04e"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.793629 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.805238 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-57f999f495-w8tt5" event={"ID":"65391885-7b23-49fc-b435-ea21781c2f4d","Type":"ContainerStarted","Data":"af8bbb933c5329286011aadd354bc431bccd30c979797501bb6119f753c451c1"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.805323 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-57f999f495-w8tt5" event={"ID":"65391885-7b23-49fc-b435-ea21781c2f4d","Type":"ContainerStarted","Data":"9aafe30d4bf120994af7a774b6246d62c74259a0a0fb130a81af5db4e8a21f7d"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.826676 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerStarted","Data":"5b321f251c22b5cceba19259b2c91f58425efdf0fbfb3d8503d06df8b8a7f6e7"}
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.828783 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6b66855878-jbxkj" podStartSLOduration=5.828764478 podStartE2EDuration="5.828764478s" podCreationTimestamp="2025-12-02 17:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:10.780458754 +0000 UTC m=+1281.307347503" watchObservedRunningTime="2025-12-02 17:04:10.828764478 +0000 UTC m=+1281.355653227"
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.830783 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.830810756 podStartE2EDuration="6.830775986s" podCreationTimestamp="2025-12-02 17:04:04 +0000 UTC" firstStartedPulling="2025-12-02 17:04:06.25165279 +0000 UTC m=+1276.778541539" lastFinishedPulling="2025-12-02 17:04:07.25161802 +0000 UTC m=+1277.778506769" observedRunningTime="2025-12-02 17:04:10.803338125 +0000 UTC m=+1281.330226874" watchObservedRunningTime="2025-12-02 17:04:10.830775986 +0000 UTC m=+1281.357664735"
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.875118 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" podStartSLOduration=5.875093527 podStartE2EDuration="5.875093527s" podCreationTimestamp="2025-12-02 17:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:10.834682977 +0000 UTC m=+1281.361571726" watchObservedRunningTime="2025-12-02 17:04:10.875093527 +0000 UTC m=+1281.401982276"
Dec 02 17:04:10 crc kubenswrapper[4747]: I1202 17:04:10.883339 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-57f999f495-w8tt5" podStartSLOduration=2.970143043 podStartE2EDuration="5.88331063s" podCreationTimestamp="2025-12-02 17:04:05 +0000 UTC" firstStartedPulling="2025-12-02 17:04:06.473025768 +0000 UTC m=+1276.999914517" lastFinishedPulling="2025-12-02 17:04:09.386193355 +0000 UTC m=+1279.913082104" observedRunningTime="2025-12-02 17:04:10.859938525 +0000 UTC m=+1281.386827274" watchObservedRunningTime="2025-12-02 17:04:10.88331063 +0000 UTC m=+1281.410199379"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.699411 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6c8c69f76b-hxmzj"]
Dec 02 17:04:11 crc kubenswrapper[4747]: E1202 17:04:11.700676 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="797476c5-9530-40d5-87cb-58125f967fe6" containerName="init"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.700705 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="797476c5-9530-40d5-87cb-58125f967fe6" containerName="init"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.700945 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="797476c5-9530-40d5-87cb-58125f967fe6" containerName="init"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.702125 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.705026 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.715422 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.722618 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6c8c69f76b-hxmzj"]
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.789588 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="797476c5-9530-40d5-87cb-58125f967fe6" path="/var/lib/kubelet/pods/797476c5-9530-40d5-87cb-58125f967fe6/volumes"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.814238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-public-tls-certs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.814343 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-combined-ca-bundle\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.814378 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-config-data\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.814483 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-config-data-custom\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.814522 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b329ab1-0ac2-4758-b497-7650406ab087-logs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.814537 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr4jb\" (UniqueName: \"kubernetes.io/projected/6b329ab1-0ac2-4758-b497-7650406ab087-kube-api-access-zr4jb\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.814564 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-internal-tls-certs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.847377 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerStarted","Data":"ba5d57c2843b8df50efd299caaa9f7f100f1e4501913030e6110c93a4cc59a80"}
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.859974 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api-log" containerID="cri-o://7cabf4deb964e785602c85cb53ed328f1a2bade12a410a391e6a9ce09fd0465a" gracePeriod=30
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.860305 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api" containerID="cri-o://a2c36de906d0621d1f04568445e1a2d2ab9283003b554abcc6a9329747174f2f" gracePeriod=30
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.860467 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6430c7e8-1065-422d-b512-28f3c2356d6a","Type":"ContainerStarted","Data":"a2c36de906d0621d1f04568445e1a2d2ab9283003b554abcc6a9329747174f2f"}
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.860976 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.899155 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.899132503 podStartE2EDuration="6.899132503s" podCreationTimestamp="2025-12-02 17:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:11.894766339 +0000 UTC m=+1282.421655078" watchObservedRunningTime="2025-12-02 17:04:11.899132503 +0000
UTC m=+1282.426021252" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.916531 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-public-tls-certs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.916703 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-combined-ca-bundle\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.916754 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-config-data\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.916842 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-config-data-custom\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.916889 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr4jb\" (UniqueName: \"kubernetes.io/projected/6b329ab1-0ac2-4758-b497-7650406ab087-kube-api-access-zr4jb\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.916923 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b329ab1-0ac2-4758-b497-7650406ab087-logs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.916950 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-internal-tls-certs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.920309 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b329ab1-0ac2-4758-b497-7650406ab087-logs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.924583 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-public-tls-certs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 
17:04:11.925370 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-internal-tls-certs\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.933594 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-config-data-custom\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.938655 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-combined-ca-bundle\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.944977 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b329ab1-0ac2-4758-b497-7650406ab087-config-data\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:11 crc kubenswrapper[4747]: I1202 17:04:11.953176 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr4jb\" (UniqueName: \"kubernetes.io/projected/6b329ab1-0ac2-4758-b497-7650406ab087-kube-api-access-zr4jb\") pod \"barbican-api-6c8c69f76b-hxmzj\" (UID: \"6b329ab1-0ac2-4758-b497-7650406ab087\") " pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.026940 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.602483 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6c8c69f76b-hxmzj"] Dec 02 17:04:12 crc kubenswrapper[4747]: W1202 17:04:12.629095 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b329ab1_0ac2_4758_b497_7650406ab087.slice/crio-d2cf0f90507ab5ebe79b21e55ddfd3a9dd918fe26dc175374cf1e40d0c1aff31 WatchSource:0}: Error finding container d2cf0f90507ab5ebe79b21e55ddfd3a9dd918fe26dc175374cf1e40d0c1aff31: Status 404 returned error can't find the container with id d2cf0f90507ab5ebe79b21e55ddfd3a9dd918fe26dc175374cf1e40d0c1aff31 Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.870293 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerStarted","Data":"37ab0a92fa677769f6ab910c1175163cc105f8a08c7579d88d594b2530d61f44"} Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.871766 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8c69f76b-hxmzj" event={"ID":"6b329ab1-0ac2-4758-b497-7650406ab087","Type":"ContainerStarted","Data":"d2cf0f90507ab5ebe79b21e55ddfd3a9dd918fe26dc175374cf1e40d0c1aff31"} Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.874397 4747 generic.go:334] "Generic (PLEG): container finished" podID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerID="a2c36de906d0621d1f04568445e1a2d2ab9283003b554abcc6a9329747174f2f" exitCode=0 Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.874440 4747 generic.go:334] "Generic (PLEG): container finished" podID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerID="7cabf4deb964e785602c85cb53ed328f1a2bade12a410a391e6a9ce09fd0465a" exitCode=143 Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.874477 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6430c7e8-1065-422d-b512-28f3c2356d6a","Type":"ContainerDied","Data":"a2c36de906d0621d1f04568445e1a2d2ab9283003b554abcc6a9329747174f2f"} Dec 02 17:04:12 crc kubenswrapper[4747]: I1202 17:04:12.874521 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6430c7e8-1065-422d-b512-28f3c2356d6a","Type":"ContainerDied","Data":"7cabf4deb964e785602c85cb53ed328f1a2bade12a410a391e6a9ce09fd0465a"} Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.346147 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.449572 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6430c7e8-1065-422d-b512-28f3c2356d6a-logs\") pod \"6430c7e8-1065-422d-b512-28f3c2356d6a\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.449894 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data-custom\") pod \"6430c7e8-1065-422d-b512-28f3c2356d6a\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.450030 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-combined-ca-bundle\") pod \"6430c7e8-1065-422d-b512-28f3c2356d6a\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.450050 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk6zf\" (UniqueName: \"kubernetes.io/projected/6430c7e8-1065-422d-b512-28f3c2356d6a-kube-api-access-tk6zf\") pod \"6430c7e8-1065-422d-b512-28f3c2356d6a\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.450085 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-scripts\") pod \"6430c7e8-1065-422d-b512-28f3c2356d6a\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.450122 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data\") pod \"6430c7e8-1065-422d-b512-28f3c2356d6a\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.450185 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6430c7e8-1065-422d-b512-28f3c2356d6a-etc-machine-id\") pod \"6430c7e8-1065-422d-b512-28f3c2356d6a\" (UID: \"6430c7e8-1065-422d-b512-28f3c2356d6a\") " Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.450296 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6430c7e8-1065-422d-b512-28f3c2356d6a-logs" (OuterVolumeSpecName: "logs") pod "6430c7e8-1065-422d-b512-28f3c2356d6a" (UID: "6430c7e8-1065-422d-b512-28f3c2356d6a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.451941 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6430c7e8-1065-422d-b512-28f3c2356d6a-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.452320 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6430c7e8-1065-422d-b512-28f3c2356d6a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6430c7e8-1065-422d-b512-28f3c2356d6a" (UID: "6430c7e8-1065-422d-b512-28f3c2356d6a"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.459423 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-scripts" (OuterVolumeSpecName: "scripts") pod "6430c7e8-1065-422d-b512-28f3c2356d6a" (UID: "6430c7e8-1065-422d-b512-28f3c2356d6a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.461703 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6430c7e8-1065-422d-b512-28f3c2356d6a-kube-api-access-tk6zf" (OuterVolumeSpecName: "kube-api-access-tk6zf") pod "6430c7e8-1065-422d-b512-28f3c2356d6a" (UID: "6430c7e8-1065-422d-b512-28f3c2356d6a"). InnerVolumeSpecName "kube-api-access-tk6zf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.483038 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6430c7e8-1065-422d-b512-28f3c2356d6a" (UID: "6430c7e8-1065-422d-b512-28f3c2356d6a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.510915 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6430c7e8-1065-422d-b512-28f3c2356d6a" (UID: "6430c7e8-1065-422d-b512-28f3c2356d6a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.513520 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data" (OuterVolumeSpecName: "config-data") pod "6430c7e8-1065-422d-b512-28f3c2356d6a" (UID: "6430c7e8-1065-422d-b512-28f3c2356d6a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.555362 4747 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6430c7e8-1065-422d-b512-28f3c2356d6a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.555404 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.555415 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.555425 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk6zf\" (UniqueName: \"kubernetes.io/projected/6430c7e8-1065-422d-b512-28f3c2356d6a-kube-api-access-tk6zf\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.555438 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.555447 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6430c7e8-1065-422d-b512-28f3c2356d6a-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.885705 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerStarted","Data":"aa330fbeca86b4a7e7074394ff3bc9a06b56ae4abff1920a3fafe4bc2822ba12"} Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.887869 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8c69f76b-hxmzj" event={"ID":"6b329ab1-0ac2-4758-b497-7650406ab087","Type":"ContainerStarted","Data":"bc05992119ff33db9d2418bf5c889de59b5c45de94a5fc65f41017284d3949e4"} Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.887930 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6c8c69f76b-hxmzj" event={"ID":"6b329ab1-0ac2-4758-b497-7650406ab087","Type":"ContainerStarted","Data":"edab4b41e6f03704729968061fdf6b3ed35022c3ec0b981f42867bc48f895c4c"} Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.889451 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.889482 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.891998 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6430c7e8-1065-422d-b512-28f3c2356d6a","Type":"ContainerDied","Data":"c1ca1daf1437fa31d806279db8a08149602ca29a92914565f9793bdab4d274a7"} Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.892071 4747 scope.go:117] "RemoveContainer" containerID="a2c36de906d0621d1f04568445e1a2d2ab9283003b554abcc6a9329747174f2f" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.892264 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.919843 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6c8c69f76b-hxmzj" podStartSLOduration=2.919820857 podStartE2EDuration="2.919820857s" podCreationTimestamp="2025-12-02 17:04:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:13.909355309 +0000 UTC m=+1284.436244058" watchObservedRunningTime="2025-12-02 17:04:13.919820857 +0000 UTC m=+1284.446709596" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.941094 4747 scope.go:117] "RemoveContainer" containerID="7cabf4deb964e785602c85cb53ed328f1a2bade12a410a391e6a9ce09fd0465a" Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.962826 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 02 17:04:13 crc kubenswrapper[4747]: I1202 17:04:13.981737 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:13.999880 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 02 17:04:14 crc kubenswrapper[4747]: E1202 17:04:14.000450 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api-log" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.000470 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api-log" Dec 02 17:04:14 crc kubenswrapper[4747]: E1202 17:04:14.000499 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.000509 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.000804 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api-log" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.000830 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" containerName="cinder-api" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.002084 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.009048 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.009319 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.009595 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.017543 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066357 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7qdx\" (UniqueName: \"kubernetes.io/projected/7ae9edb9-7743-4454-adcf-86c0c8587943-kube-api-access-s7qdx\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066474 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae9edb9-7743-4454-adcf-86c0c8587943-logs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066507 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066545 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-config-data\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066592 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066612 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-scripts\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066628 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066673 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/7ae9edb9-7743-4454-adcf-86c0c8587943-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.066693 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-config-data-custom\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168010 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-config-data\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168098 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168131 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-scripts\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168149 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168193 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7ae9edb9-7743-4454-adcf-86c0c8587943-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168212 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-config-data-custom\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168243 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7qdx\" (UniqueName: \"kubernetes.io/projected/7ae9edb9-7743-4454-adcf-86c0c8587943-kube-api-access-s7qdx\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168294 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae9edb9-7743-4454-adcf-86c0c8587943-logs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.168317 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.169638 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7ae9edb9-7743-4454-adcf-86c0c8587943-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.170298 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ae9edb9-7743-4454-adcf-86c0c8587943-logs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.174369 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.174411 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.174527 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-config-data-custom\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.176964 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-scripts\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.178825 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-config-data\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.186800 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ae9edb9-7743-4454-adcf-86c0c8587943-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.188748 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7qdx\" (UniqueName: \"kubernetes.io/projected/7ae9edb9-7743-4454-adcf-86c0c8587943-kube-api-access-s7qdx\") pod \"cinder-api-0\" (UID: \"7ae9edb9-7743-4454-adcf-86c0c8587943\") " pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.323624 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.802849 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b66855878-jbxkj" Dec 02 17:04:14 crc kubenswrapper[4747]: W1202 17:04:14.845453 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ae9edb9_7743_4454_adcf_86c0c8587943.slice/crio-81f7249c78b753e69e5447900631452127e260295a8fcbb7541c808fc7ffa52b WatchSource:0}: Error finding container 81f7249c78b753e69e5447900631452127e260295a8fcbb7541c808fc7ffa52b: Status 404 returned error can't find the container with id 81f7249c78b753e69e5447900631452127e260295a8fcbb7541c808fc7ffa52b Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.918535 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.920802 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7ae9edb9-7743-4454-adcf-86c0c8587943","Type":"ContainerStarted","Data":"81f7249c78b753e69e5447900631452127e260295a8fcbb7541c808fc7ffa52b"} Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.950499 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerStarted","Data":"b8b8713752b49db1c5233fa67badc187da764a15df397387e0518ee944f733b1"} Dec 02 17:04:14 crc kubenswrapper[4747]: I1202 17:04:14.952391 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:04:15 crc kubenswrapper[4747]: I1202 17:04:15.029891 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.718627336 podStartE2EDuration="7.029863589s" podCreationTimestamp="2025-12-02 17:04:08 +0000 UTC" firstStartedPulling="2025-12-02 17:04:09.988923434 +0000 UTC m=+1280.515812193" lastFinishedPulling="2025-12-02 17:04:14.300159707 +0000 UTC m=+1284.827048446" observedRunningTime="2025-12-02 17:04:15.021296755 +0000 UTC m=+1285.548185504" watchObservedRunningTime="2025-12-02 17:04:15.029863589 +0000 UTC m=+1285.556752338" Dec 02 17:04:15 crc kubenswrapper[4747]: I1202 17:04:15.229262 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:04:15 crc kubenswrapper[4747]: I1202 17:04:15.390947 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 02 17:04:15 crc kubenswrapper[4747]: I1202 17:04:15.772051 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6430c7e8-1065-422d-b512-28f3c2356d6a" path="/var/lib/kubelet/pods/6430c7e8-1065-422d-b512-28f3c2356d6a/volumes" Dec 02 17:04:15 crc kubenswrapper[4747]: I1202 17:04:15.826706 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 02 17:04:16 crc kubenswrapper[4747]: I1202 17:04:16.040178 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" Dec 02 17:04:16 crc kubenswrapper[4747]: I1202 17:04:16.064259 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:16 crc kubenswrapper[4747]: I1202 17:04:16.136037 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-84b966f6c9-h9fhb"] Dec 02 17:04:16 crc kubenswrapper[4747]: I1202 17:04:16.136397 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" podUID="7e213849-808c-4275-971d-daf77f6ea6f3" containerName="dnsmasq-dns" containerID="cri-o://09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e" gracePeriod=10 Dec 02 17:04:16 crc kubenswrapper[4747]: I1202 17:04:16.977382 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="cinder-scheduler" containerID="cri-o://7b5e91aba3e47ff9c6eff02b4ee9857b921c5898450a7c5caf06adb3d4c51869" gracePeriod=30 Dec 02 17:04:16 crc kubenswrapper[4747]: I1202 17:04:16.977461 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="probe" containerID="cri-o://492b803d1e491b99c76c810cf3c181e7383b6a0c2a0aa1405ab7bcca8fd2d3b7" gracePeriod=30 Dec 02 17:04:17 crc kubenswrapper[4747]: I1202 17:04:17.975454 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:04:17 crc kubenswrapper[4747]: I1202 17:04:17.986760 4747 generic.go:334] "Generic (PLEG): container finished" podID="7e213849-808c-4275-971d-daf77f6ea6f3" containerID="09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e" exitCode=0 Dec 02 17:04:17 crc kubenswrapper[4747]: I1202 17:04:17.986810 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" event={"ID":"7e213849-808c-4275-971d-daf77f6ea6f3","Type":"ContainerDied","Data":"09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e"} Dec 02 17:04:17 crc kubenswrapper[4747]: I1202 17:04:17.986818 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" Dec 02 17:04:17 crc kubenswrapper[4747]: I1202 17:04:17.986891 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-h9fhb" event={"ID":"7e213849-808c-4275-971d-daf77f6ea6f3","Type":"ContainerDied","Data":"3f5b6a82e4dc37df0d860de7ee1be051db71335e2869be77cc775a995c9c1af9"} Dec 02 17:04:17 crc kubenswrapper[4747]: I1202 17:04:17.987027 4747 scope.go:117] "RemoveContainer" containerID="09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e" Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.009076 4747 scope.go:117] "RemoveContainer" containerID="002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c" Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.079201 4747 scope.go:117] "RemoveContainer" containerID="09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e" Dec 02 17:04:18 crc kubenswrapper[4747]: E1202 17:04:18.084196 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e\": container with ID starting with 09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e not found: ID does not exist" containerID="09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e" Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.084246 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e"} err="failed to get container status \"09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e\": rpc error: code = NotFound desc = could not find container \"09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e\": container with ID starting with 09ea91fdbdb74f45443e7525c1760c00352c491daab21e8ce291b82719bfdf7e not found: ID does not exist" Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.084319 4747 scope.go:117] "RemoveContainer" containerID="002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c" Dec 02 17:04:18 crc kubenswrapper[4747]: E1202 17:04:18.087197 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c\": container with ID starting with 002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c not found: ID does not exist" containerID="002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c" Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.087230 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c"} err="failed to get container status \"002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c\": rpc error: code = NotFound desc = could not find container \"002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c\": container with ID starting with 002c7cc7881e184e97af1a91734c5ac2233684c17434f3e291f8a59f3683110c not found: ID does not exist" Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.090071 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-config\") pod \"7e213849-808c-4275-971d-daf77f6ea6f3\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") " 
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.090172 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh7xd\" (UniqueName: \"kubernetes.io/projected/7e213849-808c-4275-971d-daf77f6ea6f3-kube-api-access-fh7xd\") pod \"7e213849-808c-4275-971d-daf77f6ea6f3\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") "
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.090232 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-svc\") pod \"7e213849-808c-4275-971d-daf77f6ea6f3\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") "
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.090296 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-sb\") pod \"7e213849-808c-4275-971d-daf77f6ea6f3\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") "
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.090331 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-swift-storage-0\") pod \"7e213849-808c-4275-971d-daf77f6ea6f3\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") "
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.090405 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-nb\") pod \"7e213849-808c-4275-971d-daf77f6ea6f3\" (UID: \"7e213849-808c-4275-971d-daf77f6ea6f3\") "
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.187245 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e213849-808c-4275-971d-daf77f6ea6f3-kube-api-access-fh7xd" (OuterVolumeSpecName: "kube-api-access-fh7xd") pod "7e213849-808c-4275-971d-daf77f6ea6f3" (UID: "7e213849-808c-4275-971d-daf77f6ea6f3"). InnerVolumeSpecName "kube-api-access-fh7xd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.190008 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-config" (OuterVolumeSpecName: "config") pod "7e213849-808c-4275-971d-daf77f6ea6f3" (UID: "7e213849-808c-4275-971d-daf77f6ea6f3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.193493 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh7xd\" (UniqueName: \"kubernetes.io/projected/7e213849-808c-4275-971d-daf77f6ea6f3-kube-api-access-fh7xd\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.193516 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-config\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.267330 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7b56d86469-kh76t"
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.268434 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6b8f479dd4-zx826"
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.273692 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7e213849-808c-4275-971d-daf77f6ea6f3" (UID: "7e213849-808c-4275-971d-daf77f6ea6f3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.286541 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6b8f479dd4-zx826"
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.289562 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7e213849-808c-4275-971d-daf77f6ea6f3" (UID: "7e213849-808c-4275-971d-daf77f6ea6f3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.304369 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7e213849-808c-4275-971d-daf77f6ea6f3" (UID: "7e213849-808c-4275-971d-daf77f6ea6f3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.304487 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.304653 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.321357 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7e213849-808c-4275-971d-daf77f6ea6f3" (UID: "7e213849-808c-4275-971d-daf77f6ea6f3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.406376 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.406445 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e213849-808c-4275-971d-daf77f6ea6f3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.415093 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5cc747446d-fcnwd"
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.422523 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7f469b9bd4-z45tg"]
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.422828 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7f469b9bd4-z45tg" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-api" containerID="cri-o://b837a0ad3161a36cf803b3662cb42735f758d0f967d8cf64e6c6aee19007a45b" gracePeriod=30
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.422993 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7f469b9bd4-z45tg" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-httpd" containerID="cri-o://f6caf9ad107a550ea6207e6149e40be0d2d3ad5470664cdee598e2e0be45e0a7" gracePeriod=30
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.505229 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7c85649748-scrqf"
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.679917 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-h9fhb"]
Dec 02 17:04:18 crc kubenswrapper[4747]: I1202 17:04:18.711024 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-h9fhb"]
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.032311 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7ae9edb9-7743-4454-adcf-86c0c8587943","Type":"ContainerStarted","Data":"e5eeaa294544481a8e894121a5bd2e9fd6eaf089d4a5baec8aef4f9d4463831a"}
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.055464 4747 generic.go:334] "Generic (PLEG): container finished" podID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerID="f6caf9ad107a550ea6207e6149e40be0d2d3ad5470664cdee598e2e0be45e0a7" exitCode=0
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.055554 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f469b9bd4-z45tg" event={"ID":"d055ad7e-75e6-4ce7-ac6b-c966057f8ace","Type":"ContainerDied","Data":"f6caf9ad107a550ea6207e6149e40be0d2d3ad5470664cdee598e2e0be45e0a7"}
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.106141 4747 generic.go:334] "Generic (PLEG): container finished" podID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerID="492b803d1e491b99c76c810cf3c181e7383b6a0c2a0aa1405ab7bcca8fd2d3b7" exitCode=0
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.106531 4747 generic.go:334] "Generic (PLEG): container finished" podID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerID="7b5e91aba3e47ff9c6eff02b4ee9857b921c5898450a7c5caf06adb3d4c51869" exitCode=0
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.106228 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5","Type":"ContainerDied","Data":"492b803d1e491b99c76c810cf3c181e7383b6a0c2a0aa1405ab7bcca8fd2d3b7"}
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.106975 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5","Type":"ContainerDied","Data":"7b5e91aba3e47ff9c6eff02b4ee9857b921c5898450a7c5caf06adb3d4c51869"}
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.516085 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.562881 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6s5n\" (UniqueName: \"kubernetes.io/projected/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-kube-api-access-k6s5n\") pod \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") "
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.563077 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-scripts\") pod \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") "
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.563171 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data-custom\") pod \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") "
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.563197 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data\") pod \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") "
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.563279 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-etc-machine-id\") pod \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") "
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.563334 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-combined-ca-bundle\") pod \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\" (UID: \"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5\") "
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.567559 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" (UID: "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.606585 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-kube-api-access-k6s5n" (OuterVolumeSpecName: "kube-api-access-k6s5n") pod "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" (UID: "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5"). InnerVolumeSpecName "kube-api-access-k6s5n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.606892 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-scripts" (OuterVolumeSpecName: "scripts") pod "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" (UID: "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.608361 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" (UID: "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.666886 4747 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.666942 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6s5n\" (UniqueName: \"kubernetes.io/projected/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-kube-api-access-k6s5n\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.666959 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.666970 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.705055 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" (UID: "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.745351 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b66855878-jbxkj"
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.768854 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.774186 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data" (OuterVolumeSpecName: "config-data") pod "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" (UID: "ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.854079 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e213849-808c-4275-971d-daf77f6ea6f3" path="/var/lib/kubelet/pods/7e213849-808c-4275-971d-daf77f6ea6f3/volumes"
Dec 02 17:04:19 crc kubenswrapper[4747]: I1202 17:04:19.871226 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.123944 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5","Type":"ContainerDied","Data":"04391d3864ae463d975e738a402ea957cb576502de24e82a4cf1aa24150e0f8c"}
Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.124033 4747 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.124339 4747 scope.go:117] "RemoveContainer" containerID="492b803d1e491b99c76c810cf3c181e7383b6a0c2a0aa1405ab7bcca8fd2d3b7" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.131703 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7ae9edb9-7743-4454-adcf-86c0c8587943","Type":"ContainerStarted","Data":"9fce151026c255d97679b1e8674618278de195fb2950e672cc3432423a8ed313"} Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.165985 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.176060 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.201451 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:20 crc kubenswrapper[4747]: E1202 17:04:20.201893 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="cinder-scheduler" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.201927 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="cinder-scheduler" Dec 02 17:04:20 crc kubenswrapper[4747]: E1202 17:04:20.201942 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="probe" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.201949 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="probe" Dec 02 17:04:20 crc kubenswrapper[4747]: E1202 17:04:20.201962 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e213849-808c-4275-971d-daf77f6ea6f3" containerName="init" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.201969 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e213849-808c-4275-971d-daf77f6ea6f3" containerName="init" Dec 02 17:04:20 crc kubenswrapper[4747]: E1202 17:04:20.201978 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e213849-808c-4275-971d-daf77f6ea6f3" containerName="dnsmasq-dns" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.201983 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e213849-808c-4275-971d-daf77f6ea6f3" containerName="dnsmasq-dns" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.202192 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="cinder-scheduler" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.202207 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e213849-808c-4275-971d-daf77f6ea6f3" containerName="dnsmasq-dns" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.202231 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" containerName="probe" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.203316 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.206242 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=7.206219309 podStartE2EDuration="7.206219309s" podCreationTimestamp="2025-12-02 17:04:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:20.197422368 +0000 UTC m=+1290.724311147" watchObservedRunningTime="2025-12-02 17:04:20.206219309 +0000 UTC m=+1290.733108058" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.206796 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.247503 4747 scope.go:117] "RemoveContainer" containerID="7b5e91aba3e47ff9c6eff02b4ee9857b921c5898450a7c5caf06adb3d4c51869" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.293553 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.308359 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.308405 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.308475 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-config-data\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.308535 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-scripts\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.308567 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.308626 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj9l9\" (UniqueName: \"kubernetes.io/projected/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-kube-api-access-kj9l9\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.413196 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-config-data\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.413316 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-scripts\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.413360 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.413435 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj9l9\" (UniqueName: \"kubernetes.io/projected/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-kube-api-access-kj9l9\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.413493 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.413490 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.413526 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.429137 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-scripts\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.430990 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.435011 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-config-data\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 
17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.439605 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.448540 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj9l9\" (UniqueName: \"kubernetes.io/projected/0524feb0-bce7-45c4-8cd9-38b439a4a2bb-kube-api-access-kj9l9\") pod \"cinder-scheduler-0\" (UID: \"0524feb0-bce7-45c4-8cd9-38b439a4a2bb\") " pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.554611 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 17:04:20 crc kubenswrapper[4747]: I1202 17:04:20.722420 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:21 crc kubenswrapper[4747]: I1202 17:04:21.159203 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 02 17:04:21 crc kubenswrapper[4747]: I1202 17:04:21.264870 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 17:04:21 crc kubenswrapper[4747]: I1202 17:04:21.785657 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5" path="/var/lib/kubelet/pods/ec73f8a4-607c-4fd0-ad3d-c6b466e4ada5/volumes" Dec 02 17:04:22 crc kubenswrapper[4747]: I1202 17:04:22.173982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0524feb0-bce7-45c4-8cd9-38b439a4a2bb","Type":"ContainerStarted","Data":"0d4f97da51b4e977758f5dcb3cb6f476016caf5f3808a18e57df42a6f8ae3c34"} Dec 02 17:04:22 crc kubenswrapper[4747]: I1202 17:04:22.174490 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0524feb0-bce7-45c4-8cd9-38b439a4a2bb","Type":"ContainerStarted","Data":"02e160b61c4fb670b99072f534027c3f1460d1012acb8aea9fd5270e2d7c35b6"} Dec 02 17:04:22 crc kubenswrapper[4747]: I1202 17:04:22.175442 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7c85649748-scrqf" Dec 02 17:04:22 crc kubenswrapper[4747]: I1202 17:04:22.275610 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:04:22 crc kubenswrapper[4747]: I1202 17:04:22.278635 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5cc747446d-fcnwd"] Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.191442 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0524feb0-bce7-45c4-8cd9-38b439a4a2bb","Type":"ContainerStarted","Data":"2de58623efafd4fd516ca1a23107c8e6b8df8ffb1882ae9ce50a23237b3984db"} Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.217485 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6c8c69f76b-hxmzj" Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.219248 4747 generic.go:334] "Generic (PLEG): container finished" podID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerID="b837a0ad3161a36cf803b3662cb42735f758d0f967d8cf64e6c6aee19007a45b" exitCode=0 Dec 02 17:04:23 crc 
kubenswrapper[4747]: I1202 17:04:23.219374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f469b9bd4-z45tg" event={"ID":"d055ad7e-75e6-4ce7-ac6b-c966057f8ace","Type":"ContainerDied","Data":"b837a0ad3161a36cf803b3662cb42735f758d0f967d8cf64e6c6aee19007a45b"} Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.219451 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5cc747446d-fcnwd" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon-log" containerID="cri-o://a23dd2f8d5c9c22dba3575c453fe1e808eefeb21f0eec73a7710b1121e1f288b" gracePeriod=30 Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.219561 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5cc747446d-fcnwd" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" containerID="cri-o://92cc25f582af9f955108070de723a136742482bbeaa439305c18cea41ec4b329" gracePeriod=30 Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.221893 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.221871221 podStartE2EDuration="3.221871221s" podCreationTimestamp="2025-12-02 17:04:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:23.220559383 +0000 UTC m=+1293.747448142" watchObservedRunningTime="2025-12-02 17:04:23.221871221 +0000 UTC m=+1293.748759970" Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.297328 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b66855878-jbxkj"] Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.297589 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b66855878-jbxkj" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api-log" containerID="cri-o://bd7cecf7661738a2db4fd91fc0e6317f281a3932dd1a5cc1e928e1b669d66716" gracePeriod=30 Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.298121 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b66855878-jbxkj" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api" containerID="cri-o://2aa18e15f149a7e37311e561636a6c25013d4ac84b19719e87849843f4bdb0f5" gracePeriod=30 Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.315466 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b66855878-jbxkj" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": EOF" Dec 02 17:04:23 crc kubenswrapper[4747]: I1202 17:04:23.838447 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.043948 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-combined-ca-bundle\") pod \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.044262 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s74jd\" (UniqueName: \"kubernetes.io/projected/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-kube-api-access-s74jd\") pod \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.044357 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-ovndb-tls-certs\") pod \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.044380 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-httpd-config\") pod \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.044403 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-config\") pod \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\" (UID: \"d055ad7e-75e6-4ce7-ac6b-c966057f8ace\") " Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.084239 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "d055ad7e-75e6-4ce7-ac6b-c966057f8ace" (UID: "d055ad7e-75e6-4ce7-ac6b-c966057f8ace"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.091132 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-kube-api-access-s74jd" (OuterVolumeSpecName: "kube-api-access-s74jd") pod "d055ad7e-75e6-4ce7-ac6b-c966057f8ace" (UID: "d055ad7e-75e6-4ce7-ac6b-c966057f8ace"). InnerVolumeSpecName "kube-api-access-s74jd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.155183 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s74jd\" (UniqueName: \"kubernetes.io/projected/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-kube-api-access-s74jd\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.155225 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.342541 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-config" (OuterVolumeSpecName: "config") pod "d055ad7e-75e6-4ce7-ac6b-c966057f8ace" (UID: "d055ad7e-75e6-4ce7-ac6b-c966057f8ace"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.345313 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.381153 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d055ad7e-75e6-4ce7-ac6b-c966057f8ace" (UID: "d055ad7e-75e6-4ce7-ac6b-c966057f8ace"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.404839 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "d055ad7e-75e6-4ce7-ac6b-c966057f8ace" (UID: "d055ad7e-75e6-4ce7-ac6b-c966057f8ace"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.414729 4747 generic.go:334] "Generic (PLEG): container finished" podID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerID="bd7cecf7661738a2db4fd91fc0e6317f281a3932dd1a5cc1e928e1b669d66716" exitCode=143 Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.414851 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b66855878-jbxkj" event={"ID":"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15","Type":"ContainerDied","Data":"bd7cecf7661738a2db4fd91fc0e6317f281a3932dd1a5cc1e928e1b669d66716"} Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.439086 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7f469b9bd4-z45tg" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.440298 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f469b9bd4-z45tg" event={"ID":"d055ad7e-75e6-4ce7-ac6b-c966057f8ace","Type":"ContainerDied","Data":"c540ef0b8bed9a6a8f48f394c68393a23d52e0dfc7cee1248c8934affa9077cc"} Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.440405 4747 scope.go:117] "RemoveContainer" containerID="f6caf9ad107a550ea6207e6149e40be0d2d3ad5470664cdee598e2e0be45e0a7" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.447832 4747 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.447870 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d055ad7e-75e6-4ce7-ac6b-c966057f8ace-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.480988 4747 scope.go:117] "RemoveContainer" containerID="b837a0ad3161a36cf803b3662cb42735f758d0f967d8cf64e6c6aee19007a45b" Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.488044 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7f469b9bd4-z45tg"] Dec 02 17:04:24 crc kubenswrapper[4747]: I1202 17:04:24.498523 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7f469b9bd4-z45tg"] Dec 02 17:04:25 crc kubenswrapper[4747]: I1202 17:04:25.615185 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 02 17:04:25 crc kubenswrapper[4747]: I1202 17:04:25.776410 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" path="/var/lib/kubelet/pods/d055ad7e-75e6-4ce7-ac6b-c966057f8ace/volumes" Dec 02 17:04:26 crc kubenswrapper[4747]: I1202 17:04:26.642785 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5cc747446d-fcnwd" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:47382->10.217.0.147:8443: read: connection reset by peer" Dec 02 17:04:26 crc kubenswrapper[4747]: I1202 17:04:26.740774 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b66855878-jbxkj" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:42130->10.217.0.161:9311: read: connection reset by peer" Dec 02 17:04:26 crc kubenswrapper[4747]: I1202 17:04:26.740820 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b66855878-jbxkj" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:42124->10.217.0.161:9311: read: connection reset by peer" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.363564 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7d78c5c778-tg55f" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.483448 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerID="2aa18e15f149a7e37311e561636a6c25013d4ac84b19719e87849843f4bdb0f5" exitCode=0 Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.483568 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b66855878-jbxkj" event={"ID":"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15","Type":"ContainerDied","Data":"2aa18e15f149a7e37311e561636a6c25013d4ac84b19719e87849843f4bdb0f5"} Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.485726 4747 generic.go:334] "Generic (PLEG): container finished" podID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerID="92cc25f582af9f955108070de723a136742482bbeaa439305c18cea41ec4b329" exitCode=0 Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.485757 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc747446d-fcnwd" event={"ID":"b26d33d5-1b96-470b-8677-cb5273c72d25","Type":"ContainerDied","Data":"92cc25f582af9f955108070de723a136742482bbeaa439305c18cea41ec4b329"} Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.600839 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b66855878-jbxkj" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.660789 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf49h\" (UniqueName: \"kubernetes.io/projected/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-kube-api-access-zf49h\") pod \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.661194 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-combined-ca-bundle\") pod \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.661260 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data-custom\") pod \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.661314 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data\") pod \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.672315 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-kube-api-access-zf49h" (OuterVolumeSpecName: "kube-api-access-zf49h") pod "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" (UID: "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15"). InnerVolumeSpecName "kube-api-access-zf49h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.686080 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" (UID: "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.763106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-logs\") pod \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\" (UID: \"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15\") " Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.763771 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf49h\" (UniqueName: \"kubernetes.io/projected/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-kube-api-access-zf49h\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.763789 4747 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.764487 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-logs" (OuterVolumeSpecName: "logs") pod "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" (UID: "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:27 crc kubenswrapper[4747]: I1202 17:04:27.865307 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.397525 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" (UID: "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.406679 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data" (OuterVolumeSpecName: "config-data") pod "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" (UID: "7a3e75c6-fbd8-4c12-9d38-8c9684b13d15"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.480623 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.480699 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.513188 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b66855878-jbxkj" event={"ID":"7a3e75c6-fbd8-4c12-9d38-8c9684b13d15","Type":"ContainerDied","Data":"54a18bcb0789214cdc0e007c46311015b0b9c90d3af8d9fc418983059ec3f33e"} Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.513259 4747 scope.go:117] "RemoveContainer" containerID="2aa18e15f149a7e37311e561636a6c25013d4ac84b19719e87849843f4bdb0f5" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.513286 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b66855878-jbxkj" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.555125 4747 scope.go:117] "RemoveContainer" containerID="bd7cecf7661738a2db4fd91fc0e6317f281a3932dd1a5cc1e928e1b669d66716" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.555732 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.559231 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b66855878-jbxkj"] Dec 02 17:04:28 crc kubenswrapper[4747]: I1202 17:04:28.569706 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6b66855878-jbxkj"] Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.498459 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 02 17:04:29 crc kubenswrapper[4747]: E1202 17:04:29.499333 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499354 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api" Dec 02 17:04:29 crc kubenswrapper[4747]: E1202 17:04:29.499379 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api-log" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499388 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api-log" Dec 02 17:04:29 crc kubenswrapper[4747]: E1202 17:04:29.499410 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-api" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499417 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-api" Dec 02 17:04:29 crc kubenswrapper[4747]: E1202 17:04:29.499438 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-httpd" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499444 4747 
state_mem.go:107] "Deleted CPUSet assignment" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-httpd" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499649 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-httpd" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499667 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api-log" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499684 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" containerName="barbican-api" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.499700 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d055ad7e-75e6-4ce7-ac6b-c966057f8ace" containerName="neutron-api" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.502687 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.508464 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.508520 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-mzkrf" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.508464 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.518278 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.602434 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config-secret\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.602530 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.602658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.602870 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rktp9\" (UniqueName: \"kubernetes.io/projected/516f803f-f2d8-46fe-b118-3fb269827dbd-kube-api-access-rktp9\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.704554 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.704668 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rktp9\" (UniqueName: \"kubernetes.io/projected/516f803f-f2d8-46fe-b118-3fb269827dbd-kube-api-access-rktp9\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.704736 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config-secret\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.704786 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.706457 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.712003 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config-secret\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.724328 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.753652 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rktp9\" (UniqueName: \"kubernetes.io/projected/516f803f-f2d8-46fe-b118-3fb269827dbd-kube-api-access-rktp9\") pod \"openstackclient\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.799665 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a3e75c6-fbd8-4c12-9d38-8c9684b13d15" path="/var/lib/kubelet/pods/7a3e75c6-fbd8-4c12-9d38-8c9684b13d15/volumes" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.840181 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.851461 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.895572 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.909226 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.910507 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 17:04:29 crc kubenswrapper[4747]: I1202 17:04:29.932343 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.011016 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af70033c-2a14-481a-a85d-3063c09611fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.011118 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/af70033c-2a14-481a-a85d-3063c09611fd-openstack-config\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.011151 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdxbf\" (UniqueName: \"kubernetes.io/projected/af70033c-2a14-481a-a85d-3063c09611fd-kube-api-access-bdxbf\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.011618 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/af70033c-2a14-481a-a85d-3063c09611fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: E1202 17:04:30.062060 4747 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 02 17:04:30 crc kubenswrapper[4747]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_516f803f-f2d8-46fe-b118-3fb269827dbd_0(3ef2e6e23515d161320f101e21b9dc7db26c669ceb70577330bd42ed3b018dc1): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"3ef2e6e23515d161320f101e21b9dc7db26c669ceb70577330bd42ed3b018dc1" Netns:"/var/run/netns/79acad6b-36ab-488a-b3f8-99456187c0da" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=3ef2e6e23515d161320f101e21b9dc7db26c669ceb70577330bd42ed3b018dc1;K8S_POD_UID=516f803f-f2d8-46fe-b118-3fb269827dbd" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/516f803f-f2d8-46fe-b118-3fb269827dbd]: expected pod UID "516f803f-f2d8-46fe-b118-3fb269827dbd" 
but got "af70033c-2a14-481a-a85d-3063c09611fd" from Kube API Dec 02 17:04:30 crc kubenswrapper[4747]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 02 17:04:30 crc kubenswrapper[4747]: > Dec 02 17:04:30 crc kubenswrapper[4747]: E1202 17:04:30.062150 4747 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 02 17:04:30 crc kubenswrapper[4747]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_516f803f-f2d8-46fe-b118-3fb269827dbd_0(3ef2e6e23515d161320f101e21b9dc7db26c669ceb70577330bd42ed3b018dc1): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"3ef2e6e23515d161320f101e21b9dc7db26c669ceb70577330bd42ed3b018dc1" Netns:"/var/run/netns/79acad6b-36ab-488a-b3f8-99456187c0da" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=3ef2e6e23515d161320f101e21b9dc7db26c669ceb70577330bd42ed3b018dc1;K8S_POD_UID=516f803f-f2d8-46fe-b118-3fb269827dbd" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/516f803f-f2d8-46fe-b118-3fb269827dbd]: expected pod UID "516f803f-f2d8-46fe-b118-3fb269827dbd" but got "af70033c-2a14-481a-a85d-3063c09611fd" from Kube API Dec 02 17:04:30 crc kubenswrapper[4747]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 02 17:04:30 crc kubenswrapper[4747]: > pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.113335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/af70033c-2a14-481a-a85d-3063c09611fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.113460 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af70033c-2a14-481a-a85d-3063c09611fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.113503 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/af70033c-2a14-481a-a85d-3063c09611fd-openstack-config\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.113540 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdxbf\" (UniqueName: 
\"kubernetes.io/projected/af70033c-2a14-481a-a85d-3063c09611fd-kube-api-access-bdxbf\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.114932 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/af70033c-2a14-481a-a85d-3063c09611fd-openstack-config\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.118766 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/af70033c-2a14-481a-a85d-3063c09611fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.118854 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af70033c-2a14-481a-a85d-3063c09611fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.168694 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdxbf\" (UniqueName: \"kubernetes.io/projected/af70033c-2a14-481a-a85d-3063c09611fd-kube-api-access-bdxbf\") pod \"openstackclient\" (UID: \"af70033c-2a14-481a-a85d-3063c09611fd\") " pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.268023 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.568110 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.576076 4747 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="516f803f-f2d8-46fe-b118-3fb269827dbd" podUID="af70033c-2a14-481a-a85d-3063c09611fd" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.587075 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.725708 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config\") pod \"516f803f-f2d8-46fe-b118-3fb269827dbd\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.725831 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rktp9\" (UniqueName: \"kubernetes.io/projected/516f803f-f2d8-46fe-b118-3fb269827dbd-kube-api-access-rktp9\") pod \"516f803f-f2d8-46fe-b118-3fb269827dbd\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.725969 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-combined-ca-bundle\") pod \"516f803f-f2d8-46fe-b118-3fb269827dbd\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.726046 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config-secret\") pod \"516f803f-f2d8-46fe-b118-3fb269827dbd\" (UID: \"516f803f-f2d8-46fe-b118-3fb269827dbd\") " Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.727487 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "516f803f-f2d8-46fe-b118-3fb269827dbd" (UID: "516f803f-f2d8-46fe-b118-3fb269827dbd"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.738203 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "516f803f-f2d8-46fe-b118-3fb269827dbd" (UID: "516f803f-f2d8-46fe-b118-3fb269827dbd"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.738260 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/516f803f-f2d8-46fe-b118-3fb269827dbd-kube-api-access-rktp9" (OuterVolumeSpecName: "kube-api-access-rktp9") pod "516f803f-f2d8-46fe-b118-3fb269827dbd" (UID: "516f803f-f2d8-46fe-b118-3fb269827dbd"). InnerVolumeSpecName "kube-api-access-rktp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.739019 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "516f803f-f2d8-46fe-b118-3fb269827dbd" (UID: "516f803f-f2d8-46fe-b118-3fb269827dbd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.815751 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.829404 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.829520 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rktp9\" (UniqueName: \"kubernetes.io/projected/516f803f-f2d8-46fe-b118-3fb269827dbd-kube-api-access-rktp9\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.829589 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:30 crc kubenswrapper[4747]: I1202 17:04:30.829670 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/516f803f-f2d8-46fe-b118-3fb269827dbd-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:31 crc kubenswrapper[4747]: I1202 17:04:31.040076 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 02 17:04:31 crc kubenswrapper[4747]: I1202 17:04:31.581119 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"af70033c-2a14-481a-a85d-3063c09611fd","Type":"ContainerStarted","Data":"63ad997d19fb7f1213d882cf28f1766be445e5baf60da185b7f4431fa137447d"} Dec 02 17:04:31 crc kubenswrapper[4747]: I1202 17:04:31.581131 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 17:04:31 crc kubenswrapper[4747]: I1202 17:04:31.585560 4747 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="516f803f-f2d8-46fe-b118-3fb269827dbd" podUID="af70033c-2a14-481a-a85d-3063c09611fd" Dec 02 17:04:31 crc kubenswrapper[4747]: I1202 17:04:31.773090 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="516f803f-f2d8-46fe-b118-3fb269827dbd" path="/var/lib/kubelet/pods/516f803f-f2d8-46fe-b118-3fb269827dbd/volumes" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.478969 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-57db9b8c79-f9rx5"] Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.481452 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.484857 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.487011 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.487808 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.500445 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-57db9b8c79-f9rx5"] Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584119 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x49cg\" (UniqueName: \"kubernetes.io/projected/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-kube-api-access-x49cg\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584203 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-config-data\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584240 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-internal-tls-certs\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584263 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-log-httpd\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584404 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-etc-swift\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584500 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-public-tls-certs\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584595 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-combined-ca-bundle\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " 
pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.584618 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-run-httpd\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.686670 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x49cg\" (UniqueName: \"kubernetes.io/projected/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-kube-api-access-x49cg\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.687227 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-config-data\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.687257 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-internal-tls-certs\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.687313 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-log-httpd\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.688184 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-etc-swift\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.688363 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-public-tls-certs\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.688437 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-combined-ca-bundle\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.688469 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-run-httpd\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 
17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.689028 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-log-httpd\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.689257 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-run-httpd\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.696375 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-public-tls-certs\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.698601 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-internal-tls-certs\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.698676 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-config-data\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.699176 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-combined-ca-bundle\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.699277 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-etc-swift\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.716948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x49cg\" (UniqueName: \"kubernetes.io/projected/41bc2800-e0ab-41cf-87a8-4fe9981de2e3-kube-api-access-x49cg\") pod \"swift-proxy-57db9b8c79-f9rx5\" (UID: \"41bc2800-e0ab-41cf-87a8-4fe9981de2e3\") " pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:33 crc kubenswrapper[4747]: I1202 17:04:33.820919 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:34 crc kubenswrapper[4747]: I1202 17:04:34.312035 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5cc747446d-fcnwd" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 02 17:04:34 crc kubenswrapper[4747]: I1202 17:04:34.640074 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-57db9b8c79-f9rx5"] Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.639288 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.640169 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-central-agent" containerID="cri-o://ba5d57c2843b8df50efd299caaa9f7f100f1e4501913030e6110c93a4cc59a80" gracePeriod=30 Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.641038 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="proxy-httpd" containerID="cri-o://b8b8713752b49db1c5233fa67badc187da764a15df397387e0518ee944f733b1" gracePeriod=30 Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.641087 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="sg-core" containerID="cri-o://aa330fbeca86b4a7e7074394ff3bc9a06b56ae4abff1920a3fafe4bc2822ba12" gracePeriod=30 Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.641134 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-notification-agent" containerID="cri-o://37ab0a92fa677769f6ab910c1175163cc105f8a08c7579d88d594b2530d61f44" gracePeriod=30 Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.664899 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.674750 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-57db9b8c79-f9rx5" event={"ID":"41bc2800-e0ab-41cf-87a8-4fe9981de2e3","Type":"ContainerStarted","Data":"18776cd77ae10923866f60f1734ec83eaec9a9277de0c478581f2285d3708991"} Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.674825 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-57db9b8c79-f9rx5" event={"ID":"41bc2800-e0ab-41cf-87a8-4fe9981de2e3","Type":"ContainerStarted","Data":"3ff41ce276e779f70caea32955a08ab68c4b141de87abcae9a36a4f4ebb0c444"} Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.674873 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-57db9b8c79-f9rx5" event={"ID":"41bc2800-e0ab-41cf-87a8-4fe9981de2e3","Type":"ContainerStarted","Data":"78ce52eacb739fa38b8e8870178abdfe9cfa981a0ffe4a2a0daa4cfd6db15708"} Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.675442 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:35 crc kubenswrapper[4747]: I1202 17:04:35.734243 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-57db9b8c79-f9rx5" podStartSLOduration=2.7342206989999998 podStartE2EDuration="2.734220699s" podCreationTimestamp="2025-12-02 17:04:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:35.721210747 +0000 UTC m=+1306.248099496" watchObservedRunningTime="2025-12-02 17:04:35.734220699 +0000 UTC m=+1306.261109448" Dec 02 17:04:36 crc kubenswrapper[4747]: I1202 17:04:36.697424 4747 generic.go:334] "Generic (PLEG): container finished" podID="315b434d-0877-4071-9e78-0b5b659edc47" containerID="b8b8713752b49db1c5233fa67badc187da764a15df397387e0518ee944f733b1" exitCode=0 Dec 02 17:04:36 crc kubenswrapper[4747]: I1202 17:04:36.697828 4747 generic.go:334] "Generic (PLEG): container finished" podID="315b434d-0877-4071-9e78-0b5b659edc47" containerID="aa330fbeca86b4a7e7074394ff3bc9a06b56ae4abff1920a3fafe4bc2822ba12" exitCode=2 Dec 02 17:04:36 crc kubenswrapper[4747]: I1202 17:04:36.697839 4747 generic.go:334] "Generic (PLEG): container finished" podID="315b434d-0877-4071-9e78-0b5b659edc47" containerID="ba5d57c2843b8df50efd299caaa9f7f100f1e4501913030e6110c93a4cc59a80" exitCode=0 Dec 02 17:04:36 crc kubenswrapper[4747]: I1202 17:04:36.697499 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerDied","Data":"b8b8713752b49db1c5233fa67badc187da764a15df397387e0518ee944f733b1"} Dec 02 17:04:36 crc kubenswrapper[4747]: I1202 17:04:36.698222 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:36 crc kubenswrapper[4747]: I1202 17:04:36.698301 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerDied","Data":"aa330fbeca86b4a7e7074394ff3bc9a06b56ae4abff1920a3fafe4bc2822ba12"} Dec 02 17:04:36 crc kubenswrapper[4747]: I1202 17:04:36.698336 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerDied","Data":"ba5d57c2843b8df50efd299caaa9f7f100f1e4501913030e6110c93a4cc59a80"} Dec 02 17:04:38 crc kubenswrapper[4747]: I1202 17:04:38.724148 4747 generic.go:334] "Generic (PLEG): container finished" podID="315b434d-0877-4071-9e78-0b5b659edc47" containerID="37ab0a92fa677769f6ab910c1175163cc105f8a08c7579d88d594b2530d61f44" exitCode=0 Dec 02 17:04:38 crc kubenswrapper[4747]: I1202 17:04:38.724250 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerDied","Data":"37ab0a92fa677769f6ab910c1175163cc105f8a08c7579d88d594b2530d61f44"} Dec 02 17:04:39 crc kubenswrapper[4747]: I1202 17:04:39.132123 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.164:3000/\": dial tcp 10.217.0.164:3000: connect: connection refused" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.101047 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-t4cdr"] Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.103016 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-t4cdr" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.110785 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-t4cdr"] Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.195361 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-wjll8"] Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.198690 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wjll8" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.209669 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wjll8"] Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.240248 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjljx\" (UniqueName: \"kubernetes.io/projected/baa3122f-58e4-4adb-b773-1207e27ed51f-kube-api-access-wjljx\") pod \"nova-api-db-create-t4cdr\" (UID: \"baa3122f-58e4-4adb-b773-1207e27ed51f\") " pod="openstack/nova-api-db-create-t4cdr" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.342136 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjljx\" (UniqueName: \"kubernetes.io/projected/baa3122f-58e4-4adb-b773-1207e27ed51f-kube-api-access-wjljx\") pod \"nova-api-db-create-t4cdr\" (UID: \"baa3122f-58e4-4adb-b773-1207e27ed51f\") " pod="openstack/nova-api-db-create-t4cdr" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.342245 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r8d9\" (UniqueName: \"kubernetes.io/projected/a171e5ef-7316-4dc0-a40c-9bac98d8f28a-kube-api-access-9r8d9\") pod \"nova-cell0-db-create-wjll8\" (UID: \"a171e5ef-7316-4dc0-a40c-9bac98d8f28a\") " pod="openstack/nova-cell0-db-create-wjll8" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.374594 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjljx\" (UniqueName: \"kubernetes.io/projected/baa3122f-58e4-4adb-b773-1207e27ed51f-kube-api-access-wjljx\") pod \"nova-api-db-create-t4cdr\" (UID: \"baa3122f-58e4-4adb-b773-1207e27ed51f\") " pod="openstack/nova-api-db-create-t4cdr" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.421944 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-fq9j2"] Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.424079 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-fq9j2" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.438117 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-fq9j2"] Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.439429 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-t4cdr" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.444255 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r8d9\" (UniqueName: \"kubernetes.io/projected/a171e5ef-7316-4dc0-a40c-9bac98d8f28a-kube-api-access-9r8d9\") pod \"nova-cell0-db-create-wjll8\" (UID: \"a171e5ef-7316-4dc0-a40c-9bac98d8f28a\") " pod="openstack/nova-cell0-db-create-wjll8" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.478610 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r8d9\" (UniqueName: \"kubernetes.io/projected/a171e5ef-7316-4dc0-a40c-9bac98d8f28a-kube-api-access-9r8d9\") pod \"nova-cell0-db-create-wjll8\" (UID: \"a171e5ef-7316-4dc0-a40c-9bac98d8f28a\") " pod="openstack/nova-cell0-db-create-wjll8" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.518428 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wjll8" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.556418 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wc6w\" (UniqueName: \"kubernetes.io/projected/eff0029d-2d34-42be-b5f0-fd49f3f6b3ac-kube-api-access-2wc6w\") pod \"nova-cell1-db-create-fq9j2\" (UID: \"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac\") " pod="openstack/nova-cell1-db-create-fq9j2" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.658128 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wc6w\" (UniqueName: \"kubernetes.io/projected/eff0029d-2d34-42be-b5f0-fd49f3f6b3ac-kube-api-access-2wc6w\") pod \"nova-cell1-db-create-fq9j2\" (UID: \"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac\") " pod="openstack/nova-cell1-db-create-fq9j2" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.678002 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wc6w\" (UniqueName: \"kubernetes.io/projected/eff0029d-2d34-42be-b5f0-fd49f3f6b3ac-kube-api-access-2wc6w\") pod \"nova-cell1-db-create-fq9j2\" (UID: \"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac\") " pod="openstack/nova-cell1-db-create-fq9j2" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.764515 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-fq9j2" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.874995 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:43 crc kubenswrapper[4747]: I1202 17:04:43.875370 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-57db9b8c79-f9rx5" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.119181 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.281635 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-log-httpd\") pod \"315b434d-0877-4071-9e78-0b5b659edc47\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.281808 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgfmk\" (UniqueName: \"kubernetes.io/projected/315b434d-0877-4071-9e78-0b5b659edc47-kube-api-access-vgfmk\") pod \"315b434d-0877-4071-9e78-0b5b659edc47\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.281841 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-combined-ca-bundle\") pod \"315b434d-0877-4071-9e78-0b5b659edc47\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.281866 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-run-httpd\") pod \"315b434d-0877-4071-9e78-0b5b659edc47\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.281919 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-scripts\") pod \"315b434d-0877-4071-9e78-0b5b659edc47\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.282019 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-sg-core-conf-yaml\") pod \"315b434d-0877-4071-9e78-0b5b659edc47\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.282043 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-config-data\") pod \"315b434d-0877-4071-9e78-0b5b659edc47\" (UID: \"315b434d-0877-4071-9e78-0b5b659edc47\") " Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.283393 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "315b434d-0877-4071-9e78-0b5b659edc47" (UID: "315b434d-0877-4071-9e78-0b5b659edc47"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.283450 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "315b434d-0877-4071-9e78-0b5b659edc47" (UID: "315b434d-0877-4071-9e78-0b5b659edc47"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.293794 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-scripts" (OuterVolumeSpecName: "scripts") pod "315b434d-0877-4071-9e78-0b5b659edc47" (UID: "315b434d-0877-4071-9e78-0b5b659edc47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.297149 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/315b434d-0877-4071-9e78-0b5b659edc47-kube-api-access-vgfmk" (OuterVolumeSpecName: "kube-api-access-vgfmk") pod "315b434d-0877-4071-9e78-0b5b659edc47" (UID: "315b434d-0877-4071-9e78-0b5b659edc47"). InnerVolumeSpecName "kube-api-access-vgfmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.311895 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5cc747446d-fcnwd" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.312113 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.326014 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "315b434d-0877-4071-9e78-0b5b659edc47" (UID: "315b434d-0877-4071-9e78-0b5b659edc47"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.391840 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.391893 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.391923 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.391941 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgfmk\" (UniqueName: \"kubernetes.io/projected/315b434d-0877-4071-9e78-0b5b659edc47-kube-api-access-vgfmk\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.391954 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/315b434d-0877-4071-9e78-0b5b659edc47-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.537071 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-t4cdr"] Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.645033 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wjll8"] Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.665121 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "315b434d-0877-4071-9e78-0b5b659edc47" (UID: "315b434d-0877-4071-9e78-0b5b659edc47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.666802 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-config-data" (OuterVolumeSpecName: "config-data") pod "315b434d-0877-4071-9e78-0b5b659edc47" (UID: "315b434d-0877-4071-9e78-0b5b659edc47"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.708437 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-fq9j2"] Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.714717 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.714774 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/315b434d-0877-4071-9e78-0b5b659edc47-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:44 crc kubenswrapper[4747]: W1202 17:04:44.792080 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeff0029d_2d34_42be_b5f0_fd49f3f6b3ac.slice/crio-29a2c061dfa86161f1299181571b80a9aaac27d78f298c1a44afbd480f957692 WatchSource:0}: Error finding container 29a2c061dfa86161f1299181571b80a9aaac27d78f298c1a44afbd480f957692: Status 404 returned error can't find the container with id 29a2c061dfa86161f1299181571b80a9aaac27d78f298c1a44afbd480f957692 Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.814567 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-fq9j2" event={"ID":"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac","Type":"ContainerStarted","Data":"29a2c061dfa86161f1299181571b80a9aaac27d78f298c1a44afbd480f957692"} Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.822029 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-t4cdr" event={"ID":"baa3122f-58e4-4adb-b773-1207e27ed51f","Type":"ContainerStarted","Data":"c32b1edf66e0fa8fb67872adcee667819d5a694b8b0c24cbd788ad8c331cb013"} Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.873511 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.878202 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"315b434d-0877-4071-9e78-0b5b659edc47","Type":"ContainerDied","Data":"5b321f251c22b5cceba19259b2c91f58425efdf0fbfb3d8503d06df8b8a7f6e7"} Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.878293 4747 scope.go:117] "RemoveContainer" containerID="b8b8713752b49db1c5233fa67badc187da764a15df397387e0518ee944f733b1" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.881787 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wjll8" event={"ID":"a171e5ef-7316-4dc0-a40c-9bac98d8f28a","Type":"ContainerStarted","Data":"fc3274f130668d5540e3f1c5ab146e34d8acf8e6ab3ff6d85a9c366b8b2ea592"} Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.888123 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"af70033c-2a14-481a-a85d-3063c09611fd","Type":"ContainerStarted","Data":"b709e6146cf189359da67e487c4025cbdd30fcf5ddf29463564fbbbfaa4cc3ca"} Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.945409 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.899765705 podStartE2EDuration="15.945377541s" podCreationTimestamp="2025-12-02 17:04:29 +0000 UTC" firstStartedPulling="2025-12-02 17:04:30.828129863 +0000 UTC m=+1301.355018602" lastFinishedPulling="2025-12-02 17:04:43.873741689 +0000 UTC m=+1314.400630438" observedRunningTime="2025-12-02 17:04:44.932465162 +0000 UTC m=+1315.459353921" watchObservedRunningTime="2025-12-02 17:04:44.945377541 +0000 UTC m=+1315.472266310" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.988269 4747 scope.go:117] "RemoveContainer" containerID="aa330fbeca86b4a7e7074394ff3bc9a06b56ae4abff1920a3fafe4bc2822ba12" Dec 02 17:04:44 crc kubenswrapper[4747]: I1202 17:04:44.994199 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.025592 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.055982 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:45 crc kubenswrapper[4747]: E1202 17:04:45.056632 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="sg-core" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.056657 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="sg-core" Dec 02 17:04:45 crc kubenswrapper[4747]: E1202 17:04:45.056670 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-notification-agent" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.056681 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-notification-agent" Dec 02 17:04:45 crc kubenswrapper[4747]: E1202 17:04:45.056696 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="proxy-httpd" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.056702 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="315b434d-0877-4071-9e78-0b5b659edc47" 
containerName="proxy-httpd" Dec 02 17:04:45 crc kubenswrapper[4747]: E1202 17:04:45.056731 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-central-agent" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.056737 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-central-agent" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.056969 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="proxy-httpd" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.056985 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-central-agent" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.056994 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="sg-core" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.057005 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="315b434d-0877-4071-9e78-0b5b659edc47" containerName="ceilometer-notification-agent" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.062850 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.063705 4747 scope.go:117] "RemoveContainer" containerID="37ab0a92fa677769f6ab910c1175163cc105f8a08c7579d88d594b2530d61f44" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.065043 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.065465 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.072351 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.095125 4747 scope.go:117] "RemoveContainer" containerID="ba5d57c2843b8df50efd299caaa9f7f100f1e4501913030e6110c93a4cc59a80" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.241522 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-scripts\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.241624 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.241675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.241700 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-log-httpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.241738 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-config-data\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.241758 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twfpd\" (UniqueName: \"kubernetes.io/projected/f48416ba-24f9-4d75-921c-6ac3f9f6396e-kube-api-access-twfpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.241807 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-run-httpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.343994 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-config-data\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344075 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twfpd\" (UniqueName: \"kubernetes.io/projected/f48416ba-24f9-4d75-921c-6ac3f9f6396e-kube-api-access-twfpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344185 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-run-httpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344285 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-scripts\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344379 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344462 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344502 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-log-httpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344775 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-run-httpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.344960 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-log-httpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.349184 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-config-data\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.350350 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-scripts\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.352431 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.354930 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.364528 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twfpd\" (UniqueName: \"kubernetes.io/projected/f48416ba-24f9-4d75-921c-6ac3f9f6396e-kube-api-access-twfpd\") pod \"ceilometer-0\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.410304 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.782305 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="315b434d-0877-4071-9e78-0b5b659edc47" path="/var/lib/kubelet/pods/315b434d-0877-4071-9e78-0b5b659edc47/volumes" Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.921957 4747 generic.go:334] "Generic (PLEG): container finished" podID="a171e5ef-7316-4dc0-a40c-9bac98d8f28a" containerID="ef4da0d72c8611ea8a36925d6edd4b9b22a38bbed46f7f736dd2a1c21950d36a" exitCode=0 Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.922032 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wjll8" event={"ID":"a171e5ef-7316-4dc0-a40c-9bac98d8f28a","Type":"ContainerDied","Data":"ef4da0d72c8611ea8a36925d6edd4b9b22a38bbed46f7f736dd2a1c21950d36a"} Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.924494 4747 generic.go:334] "Generic (PLEG): container finished" podID="eff0029d-2d34-42be-b5f0-fd49f3f6b3ac" containerID="92cc0ab81906b40f358235d2e16f9247f1bf8e58d81c649e655fb9cb6d5072fa" exitCode=0 Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.924605 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-fq9j2" event={"ID":"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac","Type":"ContainerDied","Data":"92cc0ab81906b40f358235d2e16f9247f1bf8e58d81c649e655fb9cb6d5072fa"} Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.927772 4747 generic.go:334] "Generic (PLEG): container finished" podID="baa3122f-58e4-4adb-b773-1207e27ed51f" containerID="b0d760da09ed4cc286a9e91941b45b8d460c2339b89e2b3b7a8a3583545a1567" exitCode=0 Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.927950 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-t4cdr" event={"ID":"baa3122f-58e4-4adb-b773-1207e27ed51f","Type":"ContainerDied","Data":"b0d760da09ed4cc286a9e91941b45b8d460c2339b89e2b3b7a8a3583545a1567"} Dec 02 17:04:45 crc kubenswrapper[4747]: I1202 17:04:45.974018 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:46 crc kubenswrapper[4747]: I1202 17:04:46.940193 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerStarted","Data":"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576"} Dec 02 17:04:46 crc kubenswrapper[4747]: I1202 17:04:46.940543 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerStarted","Data":"462c66b1d3da2289a55d848373ce9ce041ffca1c6ae3d67283f4a59dd34ac68c"} Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.423037 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-fq9j2" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.592174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wc6w\" (UniqueName: \"kubernetes.io/projected/eff0029d-2d34-42be-b5f0-fd49f3f6b3ac-kube-api-access-2wc6w\") pod \"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac\" (UID: \"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac\") " Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.599212 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-wjll8" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.600238 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eff0029d-2d34-42be-b5f0-fd49f3f6b3ac-kube-api-access-2wc6w" (OuterVolumeSpecName: "kube-api-access-2wc6w") pod "eff0029d-2d34-42be-b5f0-fd49f3f6b3ac" (UID: "eff0029d-2d34-42be-b5f0-fd49f3f6b3ac"). InnerVolumeSpecName "kube-api-access-2wc6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.602967 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-t4cdr" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.695145 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r8d9\" (UniqueName: \"kubernetes.io/projected/a171e5ef-7316-4dc0-a40c-9bac98d8f28a-kube-api-access-9r8d9\") pod \"a171e5ef-7316-4dc0-a40c-9bac98d8f28a\" (UID: \"a171e5ef-7316-4dc0-a40c-9bac98d8f28a\") " Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.696036 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wc6w\" (UniqueName: \"kubernetes.io/projected/eff0029d-2d34-42be-b5f0-fd49f3f6b3ac-kube-api-access-2wc6w\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.730211 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a171e5ef-7316-4dc0-a40c-9bac98d8f28a-kube-api-access-9r8d9" (OuterVolumeSpecName: "kube-api-access-9r8d9") pod "a171e5ef-7316-4dc0-a40c-9bac98d8f28a" (UID: "a171e5ef-7316-4dc0-a40c-9bac98d8f28a"). InnerVolumeSpecName "kube-api-access-9r8d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.800867 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjljx\" (UniqueName: \"kubernetes.io/projected/baa3122f-58e4-4adb-b773-1207e27ed51f-kube-api-access-wjljx\") pod \"baa3122f-58e4-4adb-b773-1207e27ed51f\" (UID: \"baa3122f-58e4-4adb-b773-1207e27ed51f\") " Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.802066 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r8d9\" (UniqueName: \"kubernetes.io/projected/a171e5ef-7316-4dc0-a40c-9bac98d8f28a-kube-api-access-9r8d9\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.829187 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baa3122f-58e4-4adb-b773-1207e27ed51f-kube-api-access-wjljx" (OuterVolumeSpecName: "kube-api-access-wjljx") pod "baa3122f-58e4-4adb-b773-1207e27ed51f" (UID: "baa3122f-58e4-4adb-b773-1207e27ed51f"). InnerVolumeSpecName "kube-api-access-wjljx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.905804 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjljx\" (UniqueName: \"kubernetes.io/projected/baa3122f-58e4-4adb-b773-1207e27ed51f-kube-api-access-wjljx\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.994509 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerStarted","Data":"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5"} Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.997771 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-fq9j2" Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.997811 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-fq9j2" event={"ID":"eff0029d-2d34-42be-b5f0-fd49f3f6b3ac","Type":"ContainerDied","Data":"29a2c061dfa86161f1299181571b80a9aaac27d78f298c1a44afbd480f957692"} Dec 02 17:04:47 crc kubenswrapper[4747]: I1202 17:04:47.997838 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29a2c061dfa86161f1299181571b80a9aaac27d78f298c1a44afbd480f957692" Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.003191 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-t4cdr" event={"ID":"baa3122f-58e4-4adb-b773-1207e27ed51f","Type":"ContainerDied","Data":"c32b1edf66e0fa8fb67872adcee667819d5a694b8b0c24cbd788ad8c331cb013"} Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.003223 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c32b1edf66e0fa8fb67872adcee667819d5a694b8b0c24cbd788ad8c331cb013" Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.003225 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-t4cdr" Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.006103 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wjll8" event={"ID":"a171e5ef-7316-4dc0-a40c-9bac98d8f28a","Type":"ContainerDied","Data":"fc3274f130668d5540e3f1c5ab146e34d8acf8e6ab3ff6d85a9c366b8b2ea592"} Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.006149 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc3274f130668d5540e3f1c5ab146e34d8acf8e6ab3ff6d85a9c366b8b2ea592" Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.006254 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-wjll8" Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.273765 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.274114 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-log" containerID="cri-o://94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528" gracePeriod=30 Dec 02 17:04:48 crc kubenswrapper[4747]: I1202 17:04:48.274273 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-httpd" containerID="cri-o://05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9" gracePeriod=30 Dec 02 17:04:49 crc kubenswrapper[4747]: I1202 17:04:49.019000 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerStarted","Data":"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd"} Dec 02 17:04:49 crc kubenswrapper[4747]: I1202 17:04:49.021505 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerID="94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528" exitCode=143 Dec 02 17:04:49 crc kubenswrapper[4747]: I1202 17:04:49.021561 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ce4f4582-9147-4f5a-bf73-7d86cf4298da","Type":"ContainerDied","Data":"94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528"} Dec 02 17:04:49 crc kubenswrapper[4747]: I1202 17:04:49.339058 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:04:49 crc kubenswrapper[4747]: I1202 17:04:49.339380 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-log" containerID="cri-o://1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c" gracePeriod=30 Dec 02 17:04:49 crc kubenswrapper[4747]: I1202 17:04:49.339508 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-httpd" containerID="cri-o://4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940" gracePeriod=30 Dec 02 17:04:49 crc kubenswrapper[4747]: I1202 17:04:49.597862 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:50 crc kubenswrapper[4747]: I1202 17:04:50.315054 4747 generic.go:334] "Generic (PLEG): container finished" podID="649da975-bf0f-4472-83e6-abcf48197293" containerID="1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c" exitCode=143 Dec 02 17:04:50 crc kubenswrapper[4747]: I1202 17:04:50.315506 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"649da975-bf0f-4472-83e6-abcf48197293","Type":"ContainerDied","Data":"1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c"} Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.327973 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerStarted","Data":"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd"} Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.328213 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="sg-core" containerID="cri-o://d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd" gracePeriod=30 Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.328271 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-notification-agent" containerID="cri-o://f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5" gracePeriod=30 Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.328260 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="proxy-httpd" containerID="cri-o://bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" gracePeriod=30 Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.328505 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.328139 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-central-agent" containerID="cri-o://1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" gracePeriod=30 Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.361207 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.012185487 podStartE2EDuration="7.361179919s" podCreationTimestamp="2025-12-02 17:04:44 +0000 UTC" firstStartedPulling="2025-12-02 17:04:45.985353357 +0000 UTC m=+1316.512242106" lastFinishedPulling="2025-12-02 17:04:50.334347789 +0000 UTC m=+1320.861236538" observedRunningTime="2025-12-02 17:04:51.350599956 +0000 UTC m=+1321.877488705" watchObservedRunningTime="2025-12-02 17:04:51.361179919 +0000 UTC m=+1321.888068658" Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.907992 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.999820 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-httpd-run\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:51 crc kubenswrapper[4747]: I1202 17:04:51.999876 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-public-tls-certs\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.000013 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-config-data\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.000202 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-combined-ca-bundle\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.000243 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-logs\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.000287 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6m74\" (UniqueName: \"kubernetes.io/projected/ce4f4582-9147-4f5a-bf73-7d86cf4298da-kube-api-access-f6m74\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.000321 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.000384 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-scripts\") pod \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\" (UID: \"ce4f4582-9147-4f5a-bf73-7d86cf4298da\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.003327 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.004769 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-logs" (OuterVolumeSpecName: "logs") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.008195 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce4f4582-9147-4f5a-bf73-7d86cf4298da-kube-api-access-f6m74" (OuterVolumeSpecName: "kube-api-access-f6m74") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "kube-api-access-f6m74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.016350 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.020498 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-scripts" (OuterVolumeSpecName: "scripts") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.044015 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.095017 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-config-data" (OuterVolumeSpecName: "config-data") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.103177 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.103212 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.105100 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6m74\" (UniqueName: \"kubernetes.io/projected/ce4f4582-9147-4f5a-bf73-7d86cf4298da-kube-api-access-f6m74\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.105165 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.105182 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.105195 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce4f4582-9147-4f5a-bf73-7d86cf4298da-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.105225 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.125467 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ce4f4582-9147-4f5a-bf73-7d86cf4298da" (UID: "ce4f4582-9147-4f5a-bf73-7d86cf4298da"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.142315 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.206625 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce4f4582-9147-4f5a-bf73-7d86cf4298da-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.206957 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.245117 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.341468 4747 generic.go:334] "Generic (PLEG): container finished" podID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerID="05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9" exitCode=0 Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.342137 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.342840 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ce4f4582-9147-4f5a-bf73-7d86cf4298da","Type":"ContainerDied","Data":"05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9"} Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.342969 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ce4f4582-9147-4f5a-bf73-7d86cf4298da","Type":"ContainerDied","Data":"eb3f7acad6609269b84b8637332c8e4acf3753f4a27fa3178e1d454caf4e4081"} Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.343015 4747 scope.go:117] "RemoveContainer" containerID="05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.351317 4747 generic.go:334] "Generic (PLEG): container finished" podID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerID="bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" exitCode=0 Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.351518 4747 generic.go:334] "Generic (PLEG): container finished" podID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerID="d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd" exitCode=2 Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.351602 4747 generic.go:334] "Generic (PLEG): container finished" podID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerID="f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5" exitCode=0 Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.351668 4747 generic.go:334] "Generic (PLEG): container finished" podID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerID="1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" exitCode=0 Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.351410 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.351434 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerDied","Data":"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd"} Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.352703 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerDied","Data":"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd"} Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.352775 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerDied","Data":"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5"} Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.352865 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerDied","Data":"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576"} Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.352977 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f48416ba-24f9-4d75-921c-6ac3f9f6396e","Type":"ContainerDied","Data":"462c66b1d3da2289a55d848373ce9ce041ffca1c6ae3d67283f4a59dd34ac68c"} Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.379132 4747 scope.go:117] "RemoveContainer" containerID="94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.386754 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.398568 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.414113 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-log-httpd\") pod \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.414223 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twfpd\" (UniqueName: \"kubernetes.io/projected/f48416ba-24f9-4d75-921c-6ac3f9f6396e-kube-api-access-twfpd\") pod \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.414269 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-sg-core-conf-yaml\") pod \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.414308 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-scripts\") pod \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.414439 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-combined-ca-bundle\") pod \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.414538 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-run-httpd\") pod \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.414562 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-config-data\") pod \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\" (UID: \"f48416ba-24f9-4d75-921c-6ac3f9f6396e\") " Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.415026 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f48416ba-24f9-4d75-921c-6ac3f9f6396e" (UID: "f48416ba-24f9-4d75-921c-6ac3f9f6396e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.419086 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f48416ba-24f9-4d75-921c-6ac3f9f6396e" (UID: "f48416ba-24f9-4d75-921c-6ac3f9f6396e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421148 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421643 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-notification-agent" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421657 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-notification-agent" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421672 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-central-agent" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421678 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-central-agent" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421685 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baa3122f-58e4-4adb-b773-1207e27ed51f" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421692 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="baa3122f-58e4-4adb-b773-1207e27ed51f" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421706 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="sg-core" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421713 4747 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="sg-core" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421724 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="proxy-httpd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421729 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="proxy-httpd" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421752 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff0029d-2d34-42be-b5f0-fd49f3f6b3ac" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421759 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff0029d-2d34-42be-b5f0-fd49f3f6b3ac" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421772 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-httpd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421778 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-httpd" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421792 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-log" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421798 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-log" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.421807 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a171e5ef-7316-4dc0-a40c-9bac98d8f28a" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.421813 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a171e5ef-7316-4dc0-a40c-9bac98d8f28a" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422032 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-httpd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422046 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="baa3122f-58e4-4adb-b773-1207e27ed51f" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422054 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-central-agent" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422066 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" containerName="glance-log" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422076 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a171e5ef-7316-4dc0-a40c-9bac98d8f28a" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422093 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="proxy-httpd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422104 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="sg-core" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422114 
4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" containerName="ceilometer-notification-agent" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.422122 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="eff0029d-2d34-42be-b5f0-fd49f3f6b3ac" containerName="mariadb-database-create" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.423117 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.428921 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.430937 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.441488 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-scripts" (OuterVolumeSpecName: "scripts") pod "f48416ba-24f9-4d75-921c-6ac3f9f6396e" (UID: "f48416ba-24f9-4d75-921c-6ac3f9f6396e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.442623 4747 scope.go:117] "RemoveContainer" containerID="05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.442814 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f48416ba-24f9-4d75-921c-6ac3f9f6396e-kube-api-access-twfpd" (OuterVolumeSpecName: "kube-api-access-twfpd") pod "f48416ba-24f9-4d75-921c-6ac3f9f6396e" (UID: "f48416ba-24f9-4d75-921c-6ac3f9f6396e"). InnerVolumeSpecName "kube-api-access-twfpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.443324 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.474260 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f48416ba-24f9-4d75-921c-6ac3f9f6396e" (UID: "f48416ba-24f9-4d75-921c-6ac3f9f6396e"). InnerVolumeSpecName "sg-core-conf-yaml". 
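The cpu_manager/memory_manager burst above is triggered by the SyncLoop ADD for the replacement glance pod: before admitting it, the resource managers drop per-container accounting left over from pods that no longer exist, keyed by podUID and containerName (the E1202 "RemoveStaleState: removing container" lines are logged at error level but are routine cleanup). A simplified sketch of that bookkeeping, with hypothetical types:

    package main

    import "fmt"

    // key identifies one container's resource assignment.
    type key struct{ podUID, container string }

    // removeStaleState drops assignments whose owning pod is no longer
    // active, mirroring the "RemoveStaleState" / "Deleted CPUSet
    // assignment" pairs in the log.
    func removeStaleState(assignments map[key]string, activePods map[string]bool) {
    	for k := range assignments {
    		if !activePods[k.podUID] {
    			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
    			delete(assignments, k)
    		}
    	}
    }

    func main() {
    	assignments := map[key]string{
    		{"f48416ba-24f9-4d75-921c-6ac3f9f6396e", "sg-core"}:   "cpus 0-1",
    		{"ab63cfa7-2c4a-41cb-8e9e-707d84886893", "glance-log"}: "cpus 2-3",
    	}
    	// Only the new glance pod is still active; the ceilometer entry is stale.
    	removeStaleState(assignments, map[string]bool{"ab63cfa7-2c4a-41cb-8e9e-707d84886893": true})
    	fmt.Println(assignments)
    }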
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.474369 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9\": container with ID starting with 05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9 not found: ID does not exist" containerID="05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.474429 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9"} err="failed to get container status \"05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9\": rpc error: code = NotFound desc = could not find container \"05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9\": container with ID starting with 05fd0c732f6d63146451b147c99067b0a248cffd9a8b19a504b668e33e1434f9 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.474467 4747 scope.go:117] "RemoveContainer" containerID="94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.480891 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528\": container with ID starting with 94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528 not found: ID does not exist" containerID="94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.480965 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528"} err="failed to get container status \"94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528\": rpc error: code = NotFound desc = could not find container \"94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528\": container with ID starting with 94e08e3f8fde06a4189e486e1d074482757272ead4c513a89348683519c71528 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.480996 4747 scope.go:117] "RemoveContainer" containerID="bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.517480 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.517898 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnz4b\" (UniqueName: \"kubernetes.io/projected/ab63cfa7-2c4a-41cb-8e9e-707d84886893-kube-api-access-nnz4b\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.518512 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/ab63cfa7-2c4a-41cb-8e9e-707d84886893-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.518753 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab63cfa7-2c4a-41cb-8e9e-707d84886893-logs\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519009 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519219 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519310 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519431 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519694 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519803 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twfpd\" (UniqueName: \"kubernetes.io/projected/f48416ba-24f9-4d75-921c-6ac3f9f6396e-kube-api-access-twfpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519874 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.519977 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.520046 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f48416ba-24f9-4d75-921c-6ac3f9f6396e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:52 crc 
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.568631 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f48416ba-24f9-4d75-921c-6ac3f9f6396e" (UID: "f48416ba-24f9-4d75-921c-6ac3f9f6396e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.580017 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-config-data" (OuterVolumeSpecName: "config-data") pod "f48416ba-24f9-4d75-921c-6ac3f9f6396e" (UID: "f48416ba-24f9-4d75-921c-6ac3f9f6396e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.622564 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.622609 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.622640 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.622688 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.622705 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnz4b\" (UniqueName: \"kubernetes.io/projected/ab63cfa7-2c4a-41cb-8e9e-707d84886893-kube-api-access-nnz4b\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.622739 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab63cfa7-2c4a-41cb-8e9e-707d84886893-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.622775 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab63cfa7-2c4a-41cb-8e9e-707d84886893-logs\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.623396 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab63cfa7-2c4a-41cb-8e9e-707d84886893-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.623454 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.623469 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab63cfa7-2c4a-41cb-8e9e-707d84886893-logs\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.623532 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.623545 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f48416ba-24f9-4d75-921c-6ac3f9f6396e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.623778 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.630853 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.632048 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.632729 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.637031 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab63cfa7-2c4a-41cb-8e9e-707d84886893-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.682215 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnz4b\" (UniqueName: \"kubernetes.io/projected/ab63cfa7-2c4a-41cb-8e9e-707d84886893-kube-api-access-nnz4b\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.694039 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"ab63cfa7-2c4a-41cb-8e9e-707d84886893\") " pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.746497 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.758304 4747 scope.go:117] "RemoveContainer" containerID="d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.776326 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.803007 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.805754 4747 scope.go:117] "RemoveContainer" containerID="f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5"
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.810853 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.813655 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.818331 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.818456 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.821819 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.841290 4747 scope.go:117] "RemoveContainer" containerID="1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.894654 4747 scope.go:117] "RemoveContainer" containerID="bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.895331 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": container with ID starting with bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd not found: ID does not exist" containerID="bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.895380 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd"} err="failed to get container status \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": rpc error: code = NotFound desc = could not find container \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": container with ID starting with bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.895415 4747 scope.go:117] "RemoveContainer" containerID="d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.896027 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": container with ID starting with d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd not found: ID does not exist" containerID="d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.896051 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd"} err="failed to get container status \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": rpc error: code = NotFound desc = could not find container \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": container with ID starting with d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.896067 4747 scope.go:117] "RemoveContainer" containerID="f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.896411 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": container with ID starting with f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5 not found: ID does not exist" containerID="f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.896443 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5"} err="failed to get container status \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": rpc error: code = NotFound desc = could not find container \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": container with ID starting with f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.896466 4747 scope.go:117] "RemoveContainer" containerID="1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" Dec 02 17:04:52 crc kubenswrapper[4747]: E1202 17:04:52.896736 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": container with ID starting with 1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576 not found: ID does not exist" containerID="1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.896760 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576"} err="failed to get container status \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": rpc error: code = NotFound desc = could not find container \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": container with ID starting with 1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.896775 4747 scope.go:117] "RemoveContainer" containerID="bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.897396 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd"} err="failed to get container status \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": rpc error: code = NotFound desc = could not find container \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": container with ID starting with bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.897419 4747 scope.go:117] "RemoveContainer" containerID="d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.897898 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd"} err="failed to get container status \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": rpc error: code = NotFound desc = could not find container \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": container with ID starting with 
d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.897934 4747 scope.go:117] "RemoveContainer" containerID="f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.898502 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5"} err="failed to get container status \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": rpc error: code = NotFound desc = could not find container \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": container with ID starting with f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.898531 4747 scope.go:117] "RemoveContainer" containerID="1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.898812 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576"} err="failed to get container status \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": rpc error: code = NotFound desc = could not find container \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": container with ID starting with 1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.898849 4747 scope.go:117] "RemoveContainer" containerID="bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.899268 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd"} err="failed to get container status \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": rpc error: code = NotFound desc = could not find container \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": container with ID starting with bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.899289 4747 scope.go:117] "RemoveContainer" containerID="d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.899502 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd"} err="failed to get container status \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": rpc error: code = NotFound desc = could not find container \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": container with ID starting with d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.899522 4747 scope.go:117] "RemoveContainer" containerID="f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.902483 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5"} err="failed to get container status \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": rpc error: code = NotFound desc = could not find container \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": container with ID starting with f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.902510 4747 scope.go:117] "RemoveContainer" containerID="1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.904106 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576"} err="failed to get container status \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": rpc error: code = NotFound desc = could not find container \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": container with ID starting with 1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.904143 4747 scope.go:117] "RemoveContainer" containerID="bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.904793 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd"} err="failed to get container status \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": rpc error: code = NotFound desc = could not find container \"bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd\": container with ID starting with bb0ce9d206983426e2c20bfb3d620de1bec46cb14d031b026862e52dc91adcdd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.904811 4747 scope.go:117] "RemoveContainer" containerID="d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.908171 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd"} err="failed to get container status \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": rpc error: code = NotFound desc = could not find container \"d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd\": container with ID starting with d7ad20de87806f472430dd23c3686e63856571ce263cde697a0c3fa4ed6976bd not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.908222 4747 scope.go:117] "RemoveContainer" containerID="f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.908624 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5"} err="failed to get container status \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": rpc error: code = NotFound desc = could not find container \"f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5\": container with ID starting with f62d5f0da15821618ef1e3817d483790a578e115b0cb9de8871cb495882f64d5 not found: ID does not exist" Dec 
02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.908644 4747 scope.go:117] "RemoveContainer" containerID="1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.908873 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576"} err="failed to get container status \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": rpc error: code = NotFound desc = could not find container \"1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576\": container with ID starting with 1d426a46c2eff1a94d8a54c17cdbd9e8924e762c084bfe82d7515d983b429576 not found: ID does not exist" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.929802 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49rmn\" (UniqueName: \"kubernetes.io/projected/6e988f65-42bb-433f-9d06-51fc725af6ce-kube-api-access-49rmn\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.929923 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.930037 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-scripts\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.930067 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-run-httpd\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.930083 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.930102 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-config-data\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:52 crc kubenswrapper[4747]: I1202 17:04:52.930131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-log-httpd\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.032279 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
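[Annotation] The retry loop above is the kubelet's scope.go "RemoveContainer" racing containers that CRI-O has already deleted: every status lookup comes back gRPC NotFound, and pod_container_deletor.go reports "DeleteContainer returned error" even though the desired end state (container gone) already holds. A minimal sketch of NotFound-tolerant cleanup, assuming the generated CRI gRPC client; removeContainer and the wiring are hypothetical, not the kubelet's actual code path:

package main

// Sketch only: treat CRI NotFound as "already removed" instead of an error,
// mirroring the "ID does not exist" responses logged above.

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func removeContainer(ctx context.Context, rt runtimeapi.RuntimeServiceClient, id string) error {
	// Look the container up first, as the kubelet does before deleting.
	_, err := rt.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id})
	if status.Code(err) == codes.NotFound {
		// Already gone: deletion is idempotent, so report success rather
		// than logging "DeleteContainer returned error" on every retry.
		return nil
	}
	if err != nil {
		return fmt.Errorf("container status %q: %w", id, err)
	}
	_, err = rt.RemoveContainer(ctx, &runtimeapi.RemoveContainerRequest{ContainerId: id})
	return err
}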
\"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-log-httpd\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.032818 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49rmn\" (UniqueName: \"kubernetes.io/projected/6e988f65-42bb-433f-9d06-51fc725af6ce-kube-api-access-49rmn\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.032970 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.033005 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-scripts\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.033029 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-run-httpd\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.033052 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.033082 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-config-data\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.034218 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-run-httpd\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.034430 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-log-httpd\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.044857 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.045018 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-config-data\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.045295 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.045867 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-scripts\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.052754 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49rmn\" (UniqueName: \"kubernetes.io/projected/6e988f65-42bb-433f-9d06-51fc725af6ce-kube-api-access-49rmn\") pod \"ceilometer-0\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.164567 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.168752 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236449 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-combined-ca-bundle\") pod \"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236646 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236726 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-httpd-run\") pod \"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236770 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-config-data\") pod \"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236815 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-internal-tls-certs\") pod \"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236872 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-scripts\") pod 
\"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236928 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-logs\") pod \"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.236990 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rxpt\" (UniqueName: \"kubernetes.io/projected/649da975-bf0f-4472-83e6-abcf48197293-kube-api-access-6rxpt\") pod \"649da975-bf0f-4472-83e6-abcf48197293\" (UID: \"649da975-bf0f-4472-83e6-abcf48197293\") " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.237583 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.237891 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-logs" (OuterVolumeSpecName: "logs") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.242028 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.242327 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-scripts" (OuterVolumeSpecName: "scripts") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.245300 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/649da975-bf0f-4472-83e6-abcf48197293-kube-api-access-6rxpt" (OuterVolumeSpecName: "kube-api-access-6rxpt") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "kube-api-access-6rxpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.272105 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.343214 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.343262 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.343274 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.343286 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649da975-bf0f-4472-83e6-abcf48197293-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.343301 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rxpt\" (UniqueName: \"kubernetes.io/projected/649da975-bf0f-4472-83e6-abcf48197293-kube-api-access-6rxpt\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.343317 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.360255 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-config-data" (OuterVolumeSpecName: "config-data") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.379868 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-8d88-account-create-4b86z"] Dec 02 17:04:53 crc kubenswrapper[4747]: E1202 17:04:53.380380 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-log" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.380399 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-log" Dec 02 17:04:53 crc kubenswrapper[4747]: E1202 17:04:53.380438 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-httpd" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.380446 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-httpd" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.380636 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-httpd" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.380671 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="649da975-bf0f-4472-83e6-abcf48197293" containerName="glance-log" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.381498 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8d88-account-create-4b86z" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.386429 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.395540 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "649da975-bf0f-4472-83e6-abcf48197293" (UID: "649da975-bf0f-4472-83e6-abcf48197293"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.416134 4747 generic.go:334] "Generic (PLEG): container finished" podID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerID="a23dd2f8d5c9c22dba3575c453fe1e808eefeb21f0eec73a7710b1121e1f288b" exitCode=137 Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.416221 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc747446d-fcnwd" event={"ID":"b26d33d5-1b96-470b-8677-cb5273c72d25","Type":"ContainerDied","Data":"a23dd2f8d5c9c22dba3575c453fe1e808eefeb21f0eec73a7710b1121e1f288b"} Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.422971 4747 generic.go:334] "Generic (PLEG): container finished" podID="649da975-bf0f-4472-83e6-abcf48197293" containerID="4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940" exitCode=0 Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.423049 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"649da975-bf0f-4472-83e6-abcf48197293","Type":"ContainerDied","Data":"4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940"} Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.423087 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"649da975-bf0f-4472-83e6-abcf48197293","Type":"ContainerDied","Data":"82f3a1173bc0226ea7adae9f763f04e9efefbd32f0af5adaf1ed3c0ec32095c2"} Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.423126 4747 scope.go:117] "RemoveContainer" containerID="4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.423262 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.423800 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8d88-account-create-4b86z"] Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.434932 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.447266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhppr\" (UniqueName: \"kubernetes.io/projected/706d747c-f59b-46d1-aa90-d759e3a0f170-kube-api-access-fhppr\") pod \"nova-cell1-8d88-account-create-4b86z\" (UID: \"706d747c-f59b-46d1-aa90-d759e3a0f170\") " pod="openstack/nova-cell1-8d88-account-create-4b86z" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.447468 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.447483 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.447494 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/649da975-bf0f-4472-83e6-abcf48197293-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.465659 4747 scope.go:117] "RemoveContainer" containerID="1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.549631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhppr\" (UniqueName: \"kubernetes.io/projected/706d747c-f59b-46d1-aa90-d759e3a0f170-kube-api-access-fhppr\") pod \"nova-cell1-8d88-account-create-4b86z\" (UID: \"706d747c-f59b-46d1-aa90-d759e3a0f170\") " pod="openstack/nova-cell1-8d88-account-create-4b86z" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.579055 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.600732 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhppr\" (UniqueName: \"kubernetes.io/projected/706d747c-f59b-46d1-aa90-d759e3a0f170-kube-api-access-fhppr\") pod \"nova-cell1-8d88-account-create-4b86z\" (UID: \"706d747c-f59b-46d1-aa90-d759e3a0f170\") " pod="openstack/nova-cell1-8d88-account-create-4b86z" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.618347 4747 scope.go:117] "RemoveContainer" containerID="4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.625459 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:04:53 crc kubenswrapper[4747]: E1202 17:04:53.643943 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940\": container with ID starting with 
4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940 not found: ID does not exist" containerID="4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.643992 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940"} err="failed to get container status \"4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940\": rpc error: code = NotFound desc = could not find container \"4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940\": container with ID starting with 4906302f495247f2072be155905a50ec447e50a35debab91dca5aab3c23e8940 not found: ID does not exist" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.644021 4747 scope.go:117] "RemoveContainer" containerID="1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.648873 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.659255 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.662543 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: E1202 17:04:53.663627 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c\": container with ID starting with 1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c not found: ID does not exist" containerID="1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.663697 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c"} err="failed to get container status \"1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c\": rpc error: code = NotFound desc = could not find container \"1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c\": container with ID starting with 1aa6c65fa3c889db4c4d07dfde2aea060ad6f2269e943afaf7fee37504b74a3c not found: ID does not exist" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.674062 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.695511 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.703409 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.716777 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8d88-account-create-4b86z" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.789008 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="649da975-bf0f-4472-83e6-abcf48197293" path="/var/lib/kubelet/pods/649da975-bf0f-4472-83e6-abcf48197293/volumes" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.790331 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce4f4582-9147-4f5a-bf73-7d86cf4298da" path="/var/lib/kubelet/pods/ce4f4582-9147-4f5a-bf73-7d86cf4298da/volumes" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.791377 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f48416ba-24f9-4d75-921c-6ac3f9f6396e" path="/var/lib/kubelet/pods/f48416ba-24f9-4d75-921c-6ac3f9f6396e/volumes" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866209 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x58zq\" (UniqueName: \"kubernetes.io/projected/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-kube-api-access-x58zq\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866309 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866372 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866413 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-logs\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866494 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866541 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866613 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-internal-tls-certs\") pod \"glance-default-internal-api-0\" 
(UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.866645 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.888314 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970331 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970378 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-logs\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970432 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970464 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970518 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970544 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970576 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x58zq\" (UniqueName: \"kubernetes.io/projected/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-kube-api-access-x58zq\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.970609 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.972793 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.972816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-logs\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.986600 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:53 crc kubenswrapper[4747]: I1202 17:04:53.999078 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.000142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.011337 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.013930 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.041726 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.049726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x58zq\" (UniqueName: \"kubernetes.io/projected/f419079c-56d7-40cf-bfcb-7cf6a43c44ed-kube-api-access-x58zq\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.066222 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f419079c-56d7-40cf-bfcb-7cf6a43c44ed\") " pod="openstack/glance-default-internal-api-0" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.071822 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b26d33d5-1b96-470b-8677-cb5273c72d25-logs\") pod \"b26d33d5-1b96-470b-8677-cb5273c72d25\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.071888 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-secret-key\") pod \"b26d33d5-1b96-470b-8677-cb5273c72d25\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.072005 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-tls-certs\") pod \"b26d33d5-1b96-470b-8677-cb5273c72d25\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.072062 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-combined-ca-bundle\") pod \"b26d33d5-1b96-470b-8677-cb5273c72d25\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.072096 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-config-data\") pod \"b26d33d5-1b96-470b-8677-cb5273c72d25\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.072131 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-scripts\") pod \"b26d33d5-1b96-470b-8677-cb5273c72d25\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.072181 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89kzm\" (UniqueName: \"kubernetes.io/projected/b26d33d5-1b96-470b-8677-cb5273c72d25-kube-api-access-89kzm\") pod \"b26d33d5-1b96-470b-8677-cb5273c72d25\" (UID: \"b26d33d5-1b96-470b-8677-cb5273c72d25\") " Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.077175 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b26d33d5-1b96-470b-8677-cb5273c72d25" (UID: "b26d33d5-1b96-470b-8677-cb5273c72d25"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.077335 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b26d33d5-1b96-470b-8677-cb5273c72d25-logs" (OuterVolumeSpecName: "logs") pod "b26d33d5-1b96-470b-8677-cb5273c72d25" (UID: "b26d33d5-1b96-470b-8677-cb5273c72d25"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.082933 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b26d33d5-1b96-470b-8677-cb5273c72d25-kube-api-access-89kzm" (OuterVolumeSpecName: "kube-api-access-89kzm") pod "b26d33d5-1b96-470b-8677-cb5273c72d25" (UID: "b26d33d5-1b96-470b-8677-cb5273c72d25"). InnerVolumeSpecName "kube-api-access-89kzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.104423 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-scripts" (OuterVolumeSpecName: "scripts") pod "b26d33d5-1b96-470b-8677-cb5273c72d25" (UID: "b26d33d5-1b96-470b-8677-cb5273c72d25"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.144766 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-config-data" (OuterVolumeSpecName: "config-data") pod "b26d33d5-1b96-470b-8677-cb5273c72d25" (UID: "b26d33d5-1b96-470b-8677-cb5273c72d25"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.160145 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b26d33d5-1b96-470b-8677-cb5273c72d25" (UID: "b26d33d5-1b96-470b-8677-cb5273c72d25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.176352 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b26d33d5-1b96-470b-8677-cb5273c72d25-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.176716 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.176732 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.176744 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.176756 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b26d33d5-1b96-470b-8677-cb5273c72d25-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.176767 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89kzm\" (UniqueName: \"kubernetes.io/projected/b26d33d5-1b96-470b-8677-cb5273c72d25-kube-api-access-89kzm\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.190097 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "b26d33d5-1b96-470b-8677-cb5273c72d25" (UID: "b26d33d5-1b96-470b-8677-cb5273c72d25"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.278449 4747 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/b26d33d5-1b96-470b-8677-cb5273c72d25-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.323767 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.442456 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab63cfa7-2c4a-41cb-8e9e-707d84886893","Type":"ContainerStarted","Data":"e211efc91bc3fdafff41adb33891fc7eeb8d2963bda58d0dc795c72ee3823e63"} Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.444361 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cc747446d-fcnwd" event={"ID":"b26d33d5-1b96-470b-8677-cb5273c72d25","Type":"ContainerDied","Data":"d6c0d91e58b272df206e7ead7812af0495044d620559b29a1a0575cad8422a93"} Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.444399 4747 scope.go:117] "RemoveContainer" containerID="92cc25f582af9f955108070de723a136742482bbeaa439305c18cea41ec4b329" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.444500 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cc747446d-fcnwd" Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.456356 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerStarted","Data":"4b2ce3aad54fc129900a35e4c4902389f7dcbacfbcba6bdc6e801df49385a4e1"} Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.591799 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5cc747446d-fcnwd"] Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.632025 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5cc747446d-fcnwd"] Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.645729 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8d88-account-create-4b86z"] Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.801709 4747 scope.go:117] "RemoveContainer" containerID="a23dd2f8d5c9c22dba3575c453fe1e808eefeb21f0eec73a7710b1121e1f288b" Dec 02 17:04:54 crc kubenswrapper[4747]: W1202 17:04:54.809816 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod706d747c_f59b_46d1_aa90_d759e3a0f170.slice/crio-b4d06b948be960d05e579488ca1612ef3e468e4941ba0c491371fc4581a84324 WatchSource:0}: Error finding container b4d06b948be960d05e579488ca1612ef3e468e4941ba0c491371fc4581a84324: Status 404 returned error can't find the container with id b4d06b948be960d05e579488ca1612ef3e468e4941ba0c491371fc4581a84324 Dec 02 17:04:54 crc kubenswrapper[4747]: I1202 17:04:54.987410 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.488286 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"ab63cfa7-2c4a-41cb-8e9e-707d84886893","Type":"ContainerStarted","Data":"4d291c4d159f03ab30ba5bda837ba1089e2918b2104bf354044ba704b405c65a"} Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.497188 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f419079c-56d7-40cf-bfcb-7cf6a43c44ed","Type":"ContainerStarted","Data":"7b52537b026baf7c8d3fc5b3821d4af8b912ff2962d3b3e7467e697aaba858db"} Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.508348 4747 generic.go:334] "Generic (PLEG): container finished" podID="706d747c-f59b-46d1-aa90-d759e3a0f170" containerID="efa384a3d44fea1d05f430229d4947fcb4ff59fa8b4abd0ef6bb470a6324743d" exitCode=0 Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.508419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8d88-account-create-4b86z" event={"ID":"706d747c-f59b-46d1-aa90-d759e3a0f170","Type":"ContainerDied","Data":"efa384a3d44fea1d05f430229d4947fcb4ff59fa8b4abd0ef6bb470a6324743d"} Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.508447 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8d88-account-create-4b86z" event={"ID":"706d747c-f59b-46d1-aa90-d759e3a0f170","Type":"ContainerStarted","Data":"b4d06b948be960d05e579488ca1612ef3e468e4941ba0c491371fc4581a84324"} Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.511078 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerStarted","Data":"0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b"} Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.587579 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:04:55 crc kubenswrapper[4747]: I1202 17:04:55.773626 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" path="/var/lib/kubelet/pods/b26d33d5-1b96-470b-8677-cb5273c72d25/volumes" Dec 02 17:04:56 crc kubenswrapper[4747]: I1202 17:04:56.525924 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab63cfa7-2c4a-41cb-8e9e-707d84886893","Type":"ContainerStarted","Data":"07c4bfdc34e0486ee7116f404d2b4a033d8f3d9b21d8bd00953b7f8b436d0b8c"} Dec 02 17:04:56 crc kubenswrapper[4747]: I1202 17:04:56.531847 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f419079c-56d7-40cf-bfcb-7cf6a43c44ed","Type":"ContainerStarted","Data":"c3f042a9b2eb7dc3c2b250efa4656912cde286286675c25a02d22badd235d6dc"} Dec 02 17:04:56 crc kubenswrapper[4747]: I1202 17:04:56.531888 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f419079c-56d7-40cf-bfcb-7cf6a43c44ed","Type":"ContainerStarted","Data":"0865798b9020ebae4e78a1bc3acae8705f3d6c03eadd7fc2707c1f87cf6e7a9e"} Dec 02 17:04:56 crc kubenswrapper[4747]: I1202 17:04:56.553188 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.553162763 podStartE2EDuration="4.553162763s" podCreationTimestamp="2025-12-02 17:04:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:56.548836429 +0000 UTC m=+1327.075725178" 
watchObservedRunningTime="2025-12-02 17:04:56.553162763 +0000 UTC m=+1327.080051512" Dec 02 17:04:56 crc kubenswrapper[4747]: I1202 17:04:56.594139 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.594111704 podStartE2EDuration="3.594111704s" podCreationTimestamp="2025-12-02 17:04:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:04:56.585121427 +0000 UTC m=+1327.112010196" watchObservedRunningTime="2025-12-02 17:04:56.594111704 +0000 UTC m=+1327.121000473" Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.038839 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8d88-account-create-4b86z" Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.161694 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhppr\" (UniqueName: \"kubernetes.io/projected/706d747c-f59b-46d1-aa90-d759e3a0f170-kube-api-access-fhppr\") pod \"706d747c-f59b-46d1-aa90-d759e3a0f170\" (UID: \"706d747c-f59b-46d1-aa90-d759e3a0f170\") " Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.168088 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/706d747c-f59b-46d1-aa90-d759e3a0f170-kube-api-access-fhppr" (OuterVolumeSpecName: "kube-api-access-fhppr") pod "706d747c-f59b-46d1-aa90-d759e3a0f170" (UID: "706d747c-f59b-46d1-aa90-d759e3a0f170"). InnerVolumeSpecName "kube-api-access-fhppr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.264942 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhppr\" (UniqueName: \"kubernetes.io/projected/706d747c-f59b-46d1-aa90-d759e3a0f170-kube-api-access-fhppr\") on node \"crc\" DevicePath \"\"" Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.569936 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8d88-account-create-4b86z" event={"ID":"706d747c-f59b-46d1-aa90-d759e3a0f170","Type":"ContainerDied","Data":"b4d06b948be960d05e579488ca1612ef3e468e4941ba0c491371fc4581a84324"} Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.570020 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4d06b948be960d05e579488ca1612ef3e468e4941ba0c491371fc4581a84324" Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.570013 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8d88-account-create-4b86z" Dec 02 17:04:57 crc kubenswrapper[4747]: I1202 17:04:57.573417 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerStarted","Data":"d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651"} Dec 02 17:04:58 crc kubenswrapper[4747]: I1202 17:04:58.585069 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerStarted","Data":"142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1"} Dec 02 17:05:00 crc kubenswrapper[4747]: I1202 17:05:00.631662 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerStarted","Data":"7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225"} Dec 02 17:05:00 crc kubenswrapper[4747]: I1202 17:05:00.636579 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-central-agent" containerID="cri-o://0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b" gracePeriod=30 Dec 02 17:05:00 crc kubenswrapper[4747]: I1202 17:05:00.636974 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:05:00 crc kubenswrapper[4747]: I1202 17:05:00.637448 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="proxy-httpd" containerID="cri-o://7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225" gracePeriod=30 Dec 02 17:05:00 crc kubenswrapper[4747]: I1202 17:05:00.637527 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="sg-core" containerID="cri-o://142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1" gracePeriod=30 Dec 02 17:05:00 crc kubenswrapper[4747]: I1202 17:05:00.637579 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-notification-agent" containerID="cri-o://d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651" gracePeriod=30 Dec 02 17:05:00 crc kubenswrapper[4747]: I1202 17:05:00.669871 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.273875123 podStartE2EDuration="8.669847441s" podCreationTimestamp="2025-12-02 17:04:52 +0000 UTC" firstStartedPulling="2025-12-02 17:04:54.076144214 +0000 UTC m=+1324.603032963" lastFinishedPulling="2025-12-02 17:04:59.472116532 +0000 UTC m=+1329.999005281" observedRunningTime="2025-12-02 17:05:00.657789436 +0000 UTC m=+1331.184678185" watchObservedRunningTime="2025-12-02 17:05:00.669847441 +0000 UTC m=+1331.196736190" Dec 02 17:05:01 crc kubenswrapper[4747]: I1202 17:05:01.646486 4747 generic.go:334] "Generic (PLEG): container finished" podID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerID="7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225" exitCode=0 Dec 02 17:05:01 crc kubenswrapper[4747]: I1202 17:05:01.646942 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerID="142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1" exitCode=2 Dec 02 17:05:01 crc kubenswrapper[4747]: I1202 17:05:01.646959 4747 generic.go:334] "Generic (PLEG): container finished" podID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerID="d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651" exitCode=0 Dec 02 17:05:01 crc kubenswrapper[4747]: I1202 17:05:01.646990 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerDied","Data":"7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225"} Dec 02 17:05:01 crc kubenswrapper[4747]: I1202 17:05:01.647030 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerDied","Data":"142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1"} Dec 02 17:05:01 crc kubenswrapper[4747]: I1202 17:05:01.647045 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerDied","Data":"d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651"} Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.120046 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.171954 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-run-httpd\") pod \"6e988f65-42bb-433f-9d06-51fc725af6ce\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.172305 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49rmn\" (UniqueName: \"kubernetes.io/projected/6e988f65-42bb-433f-9d06-51fc725af6ce-kube-api-access-49rmn\") pod \"6e988f65-42bb-433f-9d06-51fc725af6ce\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.172451 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-config-data\") pod \"6e988f65-42bb-433f-9d06-51fc725af6ce\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.172531 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-scripts\") pod \"6e988f65-42bb-433f-9d06-51fc725af6ce\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.172685 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-log-httpd\") pod \"6e988f65-42bb-433f-9d06-51fc725af6ce\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.172892 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-combined-ca-bundle\") pod \"6e988f65-42bb-433f-9d06-51fc725af6ce\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " Dec 
02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.172995 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-sg-core-conf-yaml\") pod \"6e988f65-42bb-433f-9d06-51fc725af6ce\" (UID: \"6e988f65-42bb-433f-9d06-51fc725af6ce\") " Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.173107 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6e988f65-42bb-433f-9d06-51fc725af6ce" (UID: "6e988f65-42bb-433f-9d06-51fc725af6ce"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.173588 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.175696 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6e988f65-42bb-433f-9d06-51fc725af6ce" (UID: "6e988f65-42bb-433f-9d06-51fc725af6ce"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.179496 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e988f65-42bb-433f-9d06-51fc725af6ce-kube-api-access-49rmn" (OuterVolumeSpecName: "kube-api-access-49rmn") pod "6e988f65-42bb-433f-9d06-51fc725af6ce" (UID: "6e988f65-42bb-433f-9d06-51fc725af6ce"). InnerVolumeSpecName "kube-api-access-49rmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.179825 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-scripts" (OuterVolumeSpecName: "scripts") pod "6e988f65-42bb-433f-9d06-51fc725af6ce" (UID: "6e988f65-42bb-433f-9d06-51fc725af6ce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.226062 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6e988f65-42bb-433f-9d06-51fc725af6ce" (UID: "6e988f65-42bb-433f-9d06-51fc725af6ce"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.275360 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49rmn\" (UniqueName: \"kubernetes.io/projected/6e988f65-42bb-433f-9d06-51fc725af6ce-kube-api-access-49rmn\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.275685 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.275767 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6e988f65-42bb-433f-9d06-51fc725af6ce-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.275843 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.285852 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e988f65-42bb-433f-9d06-51fc725af6ce" (UID: "6e988f65-42bb-433f-9d06-51fc725af6ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.310638 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-config-data" (OuterVolumeSpecName: "config-data") pod "6e988f65-42bb-433f-9d06-51fc725af6ce" (UID: "6e988f65-42bb-433f-9d06-51fc725af6ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.377612 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.377659 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e988f65-42bb-433f-9d06-51fc725af6ce-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.660287 4747 generic.go:334] "Generic (PLEG): container finished" podID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerID="0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b" exitCode=0 Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.660359 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerDied","Data":"0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b"} Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.660429 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6e988f65-42bb-433f-9d06-51fc725af6ce","Type":"ContainerDied","Data":"4b2ce3aad54fc129900a35e4c4902389f7dcbacfbcba6bdc6e801df49385a4e1"} Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.660379 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.660452 4747 scope.go:117] "RemoveContainer" containerID="7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.692928 4747 scope.go:117] "RemoveContainer" containerID="142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.713766 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.715132 4747 scope.go:117] "RemoveContainer" containerID="d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.729481 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.740375 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.740889 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon-log" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.740931 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon-log" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.740955 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.740963 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.740979 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-central-agent" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.740988 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-central-agent" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.741011 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-notification-agent" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741019 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-notification-agent" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.741035 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="706d747c-f59b-46d1-aa90-d759e3a0f170" containerName="mariadb-account-create" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741045 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="706d747c-f59b-46d1-aa90-d759e3a0f170" containerName="mariadb-account-create" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.741061 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="proxy-httpd" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741071 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="proxy-httpd" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.741090 4747 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="sg-core" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741099 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="sg-core" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741346 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="sg-core" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741373 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="706d747c-f59b-46d1-aa90-d759e3a0f170" containerName="mariadb-account-create" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741387 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="proxy-httpd" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741400 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-central-agent" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741412 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741422 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b26d33d5-1b96-470b-8677-cb5273c72d25" containerName="horizon-log" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.741440 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" containerName="ceilometer-notification-agent" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.743631 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.747810 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.747973 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.747977 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.748187 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.749162 4747 scope.go:117] "RemoveContainer" containerID="0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.754642 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.784335 4747 scope.go:117] "RemoveContainer" containerID="7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.788548 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.788878 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225\": container with ID starting with 7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225 not found: ID does not exist" containerID="7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.788926 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225"} err="failed to get container status \"7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225\": rpc error: code = NotFound desc = could not find container \"7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225\": container with ID starting with 7dfc9e2c4df5d001b642ce79b691f31930f4fc2f03a99a1c21ab5d20f239d225 not found: ID does not exist" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.788956 4747 scope.go:117] "RemoveContainer" containerID="142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.789692 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1\": container with ID starting with 142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1 not found: ID does not exist" containerID="142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.789725 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1"} err="failed to get container status \"142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1\": rpc error: code = NotFound desc = could not find container 
\"142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1\": container with ID starting with 142d6ef8a5ec8edd3aa865f21a034bd975245a71b9f9e13ca998c5dfab7f8ec1 not found: ID does not exist" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.789747 4747 scope.go:117] "RemoveContainer" containerID="d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.790246 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651\": container with ID starting with d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651 not found: ID does not exist" containerID="d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.790287 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651"} err="failed to get container status \"d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651\": rpc error: code = NotFound desc = could not find container \"d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651\": container with ID starting with d04b7189ac09b0e8e922318130e6631808ce615670e86379b8bf5d50019ec651 not found: ID does not exist" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.790319 4747 scope.go:117] "RemoveContainer" containerID="0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b" Dec 02 17:05:02 crc kubenswrapper[4747]: E1202 17:05:02.794601 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b\": container with ID starting with 0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b not found: ID does not exist" containerID="0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.794648 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b"} err="failed to get container status \"0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b\": rpc error: code = NotFound desc = could not find container \"0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b\": container with ID starting with 0b9b4513baeba27ceba9b9d8ad4055fce19851a6c29c73ee6c76f6994e74c56b not found: ID does not exist" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.800881 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.887363 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.887476 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-run-httpd\") pod \"ceilometer-0\" (UID: 
\"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.887532 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-scripts\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.887570 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-config-data\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.887623 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5lj4\" (UniqueName: \"kubernetes.io/projected/fda58da2-6365-4820-ba0b-bb84d2db08a3-kube-api-access-d5lj4\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.887705 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-log-httpd\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.888035 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.990734 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-log-httpd\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.990879 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.990971 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.991002 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-run-httpd\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.991034 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-scripts\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.991076 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-config-data\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.991123 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5lj4\" (UniqueName: \"kubernetes.io/projected/fda58da2-6365-4820-ba0b-bb84d2db08a3-kube-api-access-d5lj4\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.991348 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-log-httpd\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.992400 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-run-httpd\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.995371 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.995693 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-scripts\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.996153 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:02 crc kubenswrapper[4747]: I1202 17:05:02.996796 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-config-data\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.018892 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5lj4\" (UniqueName: \"kubernetes.io/projected/fda58da2-6365-4820-ba0b-bb84d2db08a3-kube-api-access-d5lj4\") pod \"ceilometer-0\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") " pod="openstack/ceilometer-0" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.071140 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.256423 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-baeb-account-create-vmvkj"] Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.258139 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-baeb-account-create-vmvkj" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.260519 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.265315 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-baeb-account-create-vmvkj"] Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.399669 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpkpq\" (UniqueName: \"kubernetes.io/projected/ae346fbe-0ec8-4b40-9264-afca3b7110ba-kube-api-access-wpkpq\") pod \"nova-api-baeb-account-create-vmvkj\" (UID: \"ae346fbe-0ec8-4b40-9264-afca3b7110ba\") " pod="openstack/nova-api-baeb-account-create-vmvkj" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.450166 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-6e75-account-create-rs289"] Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.451427 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-6e75-account-create-rs289" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.456976 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.459710 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-6e75-account-create-rs289"] Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.502035 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg8xz\" (UniqueName: \"kubernetes.io/projected/5e07ad04-23dd-4c42-bce3-0131ec6ad8f7-kube-api-access-qg8xz\") pod \"nova-cell0-6e75-account-create-rs289\" (UID: \"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7\") " pod="openstack/nova-cell0-6e75-account-create-rs289" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.502166 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpkpq\" (UniqueName: \"kubernetes.io/projected/ae346fbe-0ec8-4b40-9264-afca3b7110ba-kube-api-access-wpkpq\") pod \"nova-api-baeb-account-create-vmvkj\" (UID: \"ae346fbe-0ec8-4b40-9264-afca3b7110ba\") " pod="openstack/nova-api-baeb-account-create-vmvkj" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.533138 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpkpq\" (UniqueName: \"kubernetes.io/projected/ae346fbe-0ec8-4b40-9264-afca3b7110ba-kube-api-access-wpkpq\") pod \"nova-api-baeb-account-create-vmvkj\" (UID: \"ae346fbe-0ec8-4b40-9264-afca3b7110ba\") " pod="openstack/nova-api-baeb-account-create-vmvkj" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.584600 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-baeb-account-create-vmvkj" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.604316 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg8xz\" (UniqueName: \"kubernetes.io/projected/5e07ad04-23dd-4c42-bce3-0131ec6ad8f7-kube-api-access-qg8xz\") pod \"nova-cell0-6e75-account-create-rs289\" (UID: \"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7\") " pod="openstack/nova-cell0-6e75-account-create-rs289" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.630536 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg8xz\" (UniqueName: \"kubernetes.io/projected/5e07ad04-23dd-4c42-bce3-0131ec6ad8f7-kube-api-access-qg8xz\") pod \"nova-cell0-6e75-account-create-rs289\" (UID: \"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7\") " pod="openstack/nova-cell0-6e75-account-create-rs289" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.651681 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:05:03 crc kubenswrapper[4747]: W1202 17:05:03.667719 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfda58da2_6365_4820_ba0b_bb84d2db08a3.slice/crio-a350c8d5d8cd5269335a8654f35c2eccc8fb5ed5922ed6d28c9153167281dad2 WatchSource:0}: Error finding container a350c8d5d8cd5269335a8654f35c2eccc8fb5ed5922ed6d28c9153167281dad2: Status 404 returned error can't find the container with id a350c8d5d8cd5269335a8654f35c2eccc8fb5ed5922ed6d28c9153167281dad2 Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.670738 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.670927 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.768019 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-6e75-account-create-rs289" Dec 02 17:05:03 crc kubenswrapper[4747]: I1202 17:05:03.780951 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e988f65-42bb-433f-9d06-51fc725af6ce" path="/var/lib/kubelet/pods/6e988f65-42bb-433f-9d06-51fc725af6ce/volumes" Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.146477 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-baeb-account-create-vmvkj"] Dec 02 17:05:04 crc kubenswrapper[4747]: W1202 17:05:04.288639 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e07ad04_23dd_4c42_bce3_0131ec6ad8f7.slice/crio-0cdc5676ab6cb52abc2290a300786035e9eb35855a63472675fbcfca448c4c79 WatchSource:0}: Error finding container 0cdc5676ab6cb52abc2290a300786035e9eb35855a63472675fbcfca448c4c79: Status 404 returned error can't find the container with id 0cdc5676ab6cb52abc2290a300786035e9eb35855a63472675fbcfca448c4c79 Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.297212 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-6e75-account-create-rs289"] Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.325428 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.325486 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.392014 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.399731 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.680660 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerStarted","Data":"a350c8d5d8cd5269335a8654f35c2eccc8fb5ed5922ed6d28c9153167281dad2"} Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.682630 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-baeb-account-create-vmvkj" event={"ID":"ae346fbe-0ec8-4b40-9264-afca3b7110ba","Type":"ContainerStarted","Data":"0f4666a789008645b1314a63fe0663ae86b2ff283e8283460093250399067f2e"} Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.685051 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6e75-account-create-rs289" event={"ID":"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7","Type":"ContainerStarted","Data":"0cdc5676ab6cb52abc2290a300786035e9eb35855a63472675fbcfca448c4c79"} Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.685086 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:04 crc kubenswrapper[4747]: I1202 17:05:04.685102 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 17:05:05.698483 4747 generic.go:334] "Generic (PLEG): container finished" podID="ae346fbe-0ec8-4b40-9264-afca3b7110ba" containerID="d768cfd35706ae5bbe1820042ae7a2f12fefaf56490d48d37bce34880873c494" exitCode=0 Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 
17:05:05.698595 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-baeb-account-create-vmvkj" event={"ID":"ae346fbe-0ec8-4b40-9264-afca3b7110ba","Type":"ContainerDied","Data":"d768cfd35706ae5bbe1820042ae7a2f12fefaf56490d48d37bce34880873c494"} Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 17:05:05.703763 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerStarted","Data":"3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42"} Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 17:05:05.711066 4747 generic.go:334] "Generic (PLEG): container finished" podID="5e07ad04-23dd-4c42-bce3-0131ec6ad8f7" containerID="0710b4cd7ee17dd73912b3aa3d88cf9c606db2cb971f6d3ff6ae704ff3a2e26a" exitCode=0 Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 17:05:05.711150 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6e75-account-create-rs289" event={"ID":"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7","Type":"ContainerDied","Data":"0710b4cd7ee17dd73912b3aa3d88cf9c606db2cb971f6d3ff6ae704ff3a2e26a"} Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 17:05:05.907572 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 17:05:05.907711 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 17:05:05 crc kubenswrapper[4747]: I1202 17:05:05.989539 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 17:05:06 crc kubenswrapper[4747]: I1202 17:05:06.765587 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerStarted","Data":"bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947"} Dec 02 17:05:06 crc kubenswrapper[4747]: I1202 17:05:06.768179 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 17:05:06 crc kubenswrapper[4747]: I1202 17:05:06.768210 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.057250 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.057317 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.425118 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-baeb-account-create-vmvkj" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.454010 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-6e75-account-create-rs289" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.527483 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpkpq\" (UniqueName: \"kubernetes.io/projected/ae346fbe-0ec8-4b40-9264-afca3b7110ba-kube-api-access-wpkpq\") pod \"ae346fbe-0ec8-4b40-9264-afca3b7110ba\" (UID: \"ae346fbe-0ec8-4b40-9264-afca3b7110ba\") " Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.538258 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae346fbe-0ec8-4b40-9264-afca3b7110ba-kube-api-access-wpkpq" (OuterVolumeSpecName: "kube-api-access-wpkpq") pod "ae346fbe-0ec8-4b40-9264-afca3b7110ba" (UID: "ae346fbe-0ec8-4b40-9264-afca3b7110ba"). InnerVolumeSpecName "kube-api-access-wpkpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.629572 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg8xz\" (UniqueName: \"kubernetes.io/projected/5e07ad04-23dd-4c42-bce3-0131ec6ad8f7-kube-api-access-qg8xz\") pod \"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7\" (UID: \"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7\") " Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.630565 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpkpq\" (UniqueName: \"kubernetes.io/projected/ae346fbe-0ec8-4b40-9264-afca3b7110ba-kube-api-access-wpkpq\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.636944 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e07ad04-23dd-4c42-bce3-0131ec6ad8f7-kube-api-access-qg8xz" (OuterVolumeSpecName: "kube-api-access-qg8xz") pod "5e07ad04-23dd-4c42-bce3-0131ec6ad8f7" (UID: "5e07ad04-23dd-4c42-bce3-0131ec6ad8f7"). InnerVolumeSpecName "kube-api-access-qg8xz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.731843 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg8xz\" (UniqueName: \"kubernetes.io/projected/5e07ad04-23dd-4c42-bce3-0131ec6ad8f7-kube-api-access-qg8xz\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.793822 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-6e75-account-create-rs289" event={"ID":"5e07ad04-23dd-4c42-bce3-0131ec6ad8f7","Type":"ContainerDied","Data":"0cdc5676ab6cb52abc2290a300786035e9eb35855a63472675fbcfca448c4c79"} Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.793870 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-6e75-account-create-rs289" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.793876 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cdc5676ab6cb52abc2290a300786035e9eb35855a63472675fbcfca448c4c79" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.800305 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-baeb-account-create-vmvkj" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.801613 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-baeb-account-create-vmvkj" event={"ID":"ae346fbe-0ec8-4b40-9264-afca3b7110ba","Type":"ContainerDied","Data":"0f4666a789008645b1314a63fe0663ae86b2ff283e8283460093250399067f2e"} Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.801663 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f4666a789008645b1314a63fe0663ae86b2ff283e8283460093250399067f2e" Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.804273 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerStarted","Data":"0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436"} Dec 02 17:05:07 crc kubenswrapper[4747]: I1202 17:05:07.880032 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:05:09 crc kubenswrapper[4747]: I1202 17:05:09.840034 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerStarted","Data":"ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74"} Dec 02 17:05:09 crc kubenswrapper[4747]: I1202 17:05:09.840450 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:05:09 crc kubenswrapper[4747]: I1202 17:05:09.840248 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="proxy-httpd" containerID="cri-o://ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74" gracePeriod=30 Dec 02 17:05:09 crc kubenswrapper[4747]: I1202 17:05:09.840195 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-central-agent" containerID="cri-o://3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42" gracePeriod=30 Dec 02 17:05:09 crc kubenswrapper[4747]: I1202 17:05:09.840367 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-notification-agent" containerID="cri-o://bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947" gracePeriod=30 Dec 02 17:05:09 crc kubenswrapper[4747]: I1202 17:05:09.840283 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="sg-core" containerID="cri-o://0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436" gracePeriod=30 Dec 02 17:05:09 crc kubenswrapper[4747]: I1202 17:05:09.871528 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.752774413 podStartE2EDuration="7.871508181s" podCreationTimestamp="2025-12-02 17:05:02 +0000 UTC" firstStartedPulling="2025-12-02 17:05:03.670776795 +0000 UTC m=+1334.197665544" lastFinishedPulling="2025-12-02 17:05:08.789510563 +0000 UTC m=+1339.316399312" observedRunningTime="2025-12-02 17:05:09.864580833 +0000 UTC m=+1340.391469582" watchObservedRunningTime="2025-12-02 17:05:09.871508181 +0000 UTC m=+1340.398396920" Dec 02 17:05:10 crc 
kubenswrapper[4747]: I1202 17:05:10.853176 4747 generic.go:334] "Generic (PLEG): container finished" podID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerID="ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74" exitCode=0
Dec 02 17:05:10 crc kubenswrapper[4747]: I1202 17:05:10.854282 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerDied","Data":"ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74"}
Dec 02 17:05:10 crc kubenswrapper[4747]: I1202 17:05:10.854327 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerDied","Data":"0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436"}
Dec 02 17:05:10 crc kubenswrapper[4747]: I1202 17:05:10.854543 4747 generic.go:334] "Generic (PLEG): container finished" podID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerID="0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436" exitCode=2
Dec 02 17:05:10 crc kubenswrapper[4747]: I1202 17:05:10.854666 4747 generic.go:334] "Generic (PLEG): container finished" podID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerID="bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947" exitCode=0
Dec 02 17:05:10 crc kubenswrapper[4747]: I1202 17:05:10.854733 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerDied","Data":"bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947"}
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.694429 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.831986 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-combined-ca-bundle\") pod \"fda58da2-6365-4820-ba0b-bb84d2db08a3\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") "
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.832097 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5lj4\" (UniqueName: \"kubernetes.io/projected/fda58da2-6365-4820-ba0b-bb84d2db08a3-kube-api-access-d5lj4\") pod \"fda58da2-6365-4820-ba0b-bb84d2db08a3\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") "
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.832142 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-sg-core-conf-yaml\") pod \"fda58da2-6365-4820-ba0b-bb84d2db08a3\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") "
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.832170 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-scripts\") pod \"fda58da2-6365-4820-ba0b-bb84d2db08a3\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") "
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.832192 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-run-httpd\") pod \"fda58da2-6365-4820-ba0b-bb84d2db08a3\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") "
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.832236 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-log-httpd\") pod \"fda58da2-6365-4820-ba0b-bb84d2db08a3\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") "
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.832300 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-config-data\") pod \"fda58da2-6365-4820-ba0b-bb84d2db08a3\" (UID: \"fda58da2-6365-4820-ba0b-bb84d2db08a3\") "
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.834050 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fda58da2-6365-4820-ba0b-bb84d2db08a3" (UID: "fda58da2-6365-4820-ba0b-bb84d2db08a3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.834140 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fda58da2-6365-4820-ba0b-bb84d2db08a3" (UID: "fda58da2-6365-4820-ba0b-bb84d2db08a3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.839237 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda58da2-6365-4820-ba0b-bb84d2db08a3-kube-api-access-d5lj4" (OuterVolumeSpecName: "kube-api-access-d5lj4") pod "fda58da2-6365-4820-ba0b-bb84d2db08a3" (UID: "fda58da2-6365-4820-ba0b-bb84d2db08a3"). InnerVolumeSpecName "kube-api-access-d5lj4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.839569 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-scripts" (OuterVolumeSpecName: "scripts") pod "fda58da2-6365-4820-ba0b-bb84d2db08a3" (UID: "fda58da2-6365-4820-ba0b-bb84d2db08a3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.870450 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fda58da2-6365-4820-ba0b-bb84d2db08a3" (UID: "fda58da2-6365-4820-ba0b-bb84d2db08a3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.877367 4747 generic.go:334] "Generic (PLEG): container finished" podID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerID="3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42" exitCode=0
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.877611 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerDied","Data":"3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42"}
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.877646 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fda58da2-6365-4820-ba0b-bb84d2db08a3","Type":"ContainerDied","Data":"a350c8d5d8cd5269335a8654f35c2eccc8fb5ed5922ed6d28c9153167281dad2"}
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.877671 4747 scope.go:117] "RemoveContainer" containerID="ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74"
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.877678 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.916089 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fda58da2-6365-4820-ba0b-bb84d2db08a3" (UID: "fda58da2-6365-4820-ba0b-bb84d2db08a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.934539 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5lj4\" (UniqueName: \"kubernetes.io/projected/fda58da2-6365-4820-ba0b-bb84d2db08a3-kube-api-access-d5lj4\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.934568 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.934578 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.934586 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.934597 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fda58da2-6365-4820-ba0b-bb84d2db08a3-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.934605 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.935626 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-config-data" (OuterVolumeSpecName: "config-data") pod "fda58da2-6365-4820-ba0b-bb84d2db08a3" (UID: "fda58da2-6365-4820-ba0b-bb84d2db08a3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.953641 4747 scope.go:117] "RemoveContainer" containerID="0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436"
Dec 02 17:05:12 crc kubenswrapper[4747]: I1202 17:05:12.979180 4747 scope.go:117] "RemoveContainer" containerID="bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.002021 4747 scope.go:117] "RemoveContainer" containerID="3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.023775 4747 scope.go:117] "RemoveContainer" containerID="ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.024490 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74\": container with ID starting with ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74 not found: ID does not exist" containerID="ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.024528 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74"} err="failed to get container status \"ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74\": rpc error: code = NotFound desc = could not find container \"ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74\": container with ID starting with ba2d8b595f4b40de5ea41b6afb3386e0c1152ec3a1d02e086e301fc1491c0a74 not found: ID does not exist"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.024558 4747 scope.go:117] "RemoveContainer" containerID="0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.025009 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436\": container with ID starting with 0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436 not found: ID does not exist" containerID="0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.025073 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436"} err="failed to get container status \"0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436\": rpc error: code = NotFound desc = could not find container \"0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436\": container with ID starting with 0447c5d502ecb31747aaec14237d73b70ff66442af4ed7fdccfc54b8ca316436 not found: ID does not exist"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.025117 4747 scope.go:117] "RemoveContainer" containerID="bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.026040 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947\": container with ID starting with bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947 not found: ID does not exist" containerID="bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.026072 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947"} err="failed to get container status \"bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947\": rpc error: code = NotFound desc = could not find container \"bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947\": container with ID starting with bfe302fa571375828f6b1fb632a00d66f712e4586d15c30a1922303874c2f947 not found: ID does not exist"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.026098 4747 scope.go:117] "RemoveContainer" containerID="3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.026474 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42\": container with ID starting with 3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42 not found: ID does not exist" containerID="3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.026507 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42"} err="failed to get container status \"3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42\": rpc error: code = NotFound desc = could not find container \"3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42\": container with ID starting with 3f8e54fcace5653c2b747a76082984b457dd030b88b200ff82291e9025583c42 not found: ID does not exist"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.037343 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fda58da2-6365-4820-ba0b-bb84d2db08a3-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.226199 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.238191 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255147 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.255663 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e07ad04-23dd-4c42-bce3-0131ec6ad8f7" containerName="mariadb-account-create"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255684 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e07ad04-23dd-4c42-bce3-0131ec6ad8f7" containerName="mariadb-account-create"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.255702 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-notification-agent"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255709 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-notification-agent"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.255720 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-central-agent"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255727 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-central-agent"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.255746 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="proxy-httpd"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255752 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="proxy-httpd"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.255772 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="sg-core"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255779 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="sg-core"
Dec 02 17:05:13 crc kubenswrapper[4747]: E1202 17:05:13.255795 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae346fbe-0ec8-4b40-9264-afca3b7110ba" containerName="mariadb-account-create"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255801 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae346fbe-0ec8-4b40-9264-afca3b7110ba" containerName="mariadb-account-create"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.255987 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e07ad04-23dd-4c42-bce3-0131ec6ad8f7" containerName="mariadb-account-create"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.256002 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="proxy-httpd"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.256009 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae346fbe-0ec8-4b40-9264-afca3b7110ba" containerName="mariadb-account-create"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.256018 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-notification-agent"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.256030 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="sg-core"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.256037 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" containerName="ceilometer-central-agent"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.275846 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.280265 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.280674 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.289847 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.343248 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.343318 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-run-httpd\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.343345 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-log-httpd\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.343712 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.343804 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2ml9\" (UniqueName: \"kubernetes.io/projected/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-kube-api-access-b2ml9\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.344018 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-config-data\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.344073 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-scripts\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.446493 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-config-data\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.446571 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-scripts\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.446638 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.446681 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-run-httpd\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.446713 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-log-httpd\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.446915 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.447400 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-log-httpd\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.447442 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-run-httpd\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.447477 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2ml9\" (UniqueName: \"kubernetes.io/projected/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-kube-api-access-b2ml9\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.450891 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.451083 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.451837 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-config-data\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.454668 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-scripts\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.468794 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2ml9\" (UniqueName: \"kubernetes.io/projected/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-kube-api-access-b2ml9\") pod \"ceilometer-0\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") " pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.606759 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.718530 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rtm6n"]
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.719712 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.721862 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.722286 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.722645 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-r56xf"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.730973 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rtm6n"]
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.801677 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda58da2-6365-4820-ba0b-bb84d2db08a3" path="/var/lib/kubelet/pods/fda58da2-6365-4820-ba0b-bb84d2db08a3/volumes"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.872872 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h58st\" (UniqueName: \"kubernetes.io/projected/343064c0-a3ec-4048-93a1-e76e9f648b17-kube-api-access-h58st\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.872997 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.873107 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-scripts\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.873201 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-config-data\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.974895 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.974997 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-scripts\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.975044 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-config-data\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.975194 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h58st\" (UniqueName: \"kubernetes.io/projected/343064c0-a3ec-4048-93a1-e76e9f648b17-kube-api-access-h58st\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.979711 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-scripts\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.980054 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.980063 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-config-data\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:13 crc kubenswrapper[4747]: I1202 17:05:13.995295 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h58st\" (UniqueName: \"kubernetes.io/projected/343064c0-a3ec-4048-93a1-e76e9f648b17-kube-api-access-h58st\") pod \"nova-cell0-conductor-db-sync-rtm6n\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:14 crc kubenswrapper[4747]: I1202 17:05:14.049406 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rtm6n"
Dec 02 17:05:14 crc kubenswrapper[4747]: I1202 17:05:14.219079 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:14 crc kubenswrapper[4747]: I1202 17:05:14.517461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rtm6n"]
Dec 02 17:05:14 crc kubenswrapper[4747]: W1202 17:05:14.526587 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod343064c0_a3ec_4048_93a1_e76e9f648b17.slice/crio-55fe356f25f65177e89fd11c768407072f2a411d04d8ca4c1de0e5c832e492fe WatchSource:0}: Error finding container 55fe356f25f65177e89fd11c768407072f2a411d04d8ca4c1de0e5c832e492fe: Status 404 returned error can't find the container with id 55fe356f25f65177e89fd11c768407072f2a411d04d8ca4c1de0e5c832e492fe
Dec 02 17:05:14 crc kubenswrapper[4747]: I1202 17:05:14.908737 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerStarted","Data":"816667123ba12db23479fb37f41330106ff61183c4740ffff9e8b83b4e3a7a6c"}
Dec 02 17:05:14 crc kubenswrapper[4747]: I1202 17:05:14.911128 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rtm6n" event={"ID":"343064c0-a3ec-4048-93a1-e76e9f648b17","Type":"ContainerStarted","Data":"55fe356f25f65177e89fd11c768407072f2a411d04d8ca4c1de0e5c832e492fe"}
Dec 02 17:05:20 crc kubenswrapper[4747]: I1202 17:05:20.535527 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:21 crc kubenswrapper[4747]: I1202 17:05:21.944518 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 02 17:05:22 crc kubenswrapper[4747]: I1202 17:05:22.001514 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerStarted","Data":"c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25"}
Dec 02 17:05:22 crc kubenswrapper[4747]: I1202 17:05:22.005075 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rtm6n" event={"ID":"343064c0-a3ec-4048-93a1-e76e9f648b17","Type":"ContainerStarted","Data":"4fcdd09b3c35225b33d4e07afa5133d3662e8af492e510d5ad1b047eee63dc58"}
Dec 02 17:05:26 crc kubenswrapper[4747]: I1202 17:05:26.047314 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerStarted","Data":"10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479"}
Dec 02 17:05:27 crc kubenswrapper[4747]: I1202 17:05:27.065592 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerStarted","Data":"011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71"}
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.077158 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerStarted","Data":"c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76"}
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.077822 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.077942 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-notification-agent" containerID="cri-o://10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479" gracePeriod=30
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.077973 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="proxy-httpd" containerID="cri-o://c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76" gracePeriod=30
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.077880 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-central-agent" containerID="cri-o://c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25" gracePeriod=30
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.078107 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="sg-core" containerID="cri-o://011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71" gracePeriod=30
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.105581 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.6238428599999999 podStartE2EDuration="15.10556123s" podCreationTimestamp="2025-12-02 17:05:13 +0000 UTC" firstStartedPulling="2025-12-02 17:05:14.243392918 +0000 UTC m=+1344.770281667" lastFinishedPulling="2025-12-02 17:05:27.725111288 +0000 UTC m=+1358.252000037" observedRunningTime="2025-12-02 17:05:28.100503285 +0000 UTC m=+1358.627392034" watchObservedRunningTime="2025-12-02 17:05:28.10556123 +0000 UTC m=+1358.632449979"
Dec 02 17:05:28 crc kubenswrapper[4747]: I1202 17:05:28.109058 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-rtm6n" podStartSLOduration=7.918019909 podStartE2EDuration="15.1090481s" podCreationTimestamp="2025-12-02 17:05:13 +0000 UTC" firstStartedPulling="2025-12-02 17:05:14.531628852 +0000 UTC m=+1345.058517601" lastFinishedPulling="2025-12-02 17:05:21.722657043 +0000 UTC m=+1352.249545792" observedRunningTime="2025-12-02 17:05:22.024756354 +0000 UTC m=+1352.551645103" watchObservedRunningTime="2025-12-02 17:05:28.1090481 +0000 UTC m=+1358.635936849"
Dec 02 17:05:29 crc kubenswrapper[4747]: I1202 17:05:29.097261 4747 generic.go:334] "Generic (PLEG): container finished" podID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerID="c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76" exitCode=0
Dec 02 17:05:29 crc kubenswrapper[4747]: I1202 17:05:29.097549 4747 generic.go:334] "Generic (PLEG): container finished" podID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerID="011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71" exitCode=2
Dec 02 17:05:29 crc kubenswrapper[4747]: I1202 17:05:29.097558 4747 generic.go:334] "Generic (PLEG): container finished" podID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerID="10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479" exitCode=0
Dec 02 17:05:29 crc kubenswrapper[4747]: I1202 17:05:29.097584 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerDied","Data":"c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76"}
Dec 02 17:05:29 crc kubenswrapper[4747]: I1202 17:05:29.097613 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerDied","Data":"011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71"}
Dec 02 17:05:29 crc kubenswrapper[4747]: I1202 17:05:29.097623 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerDied","Data":"10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479"}
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.853738 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888032 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-combined-ca-bundle\") pod \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") "
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888118 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2ml9\" (UniqueName: \"kubernetes.io/projected/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-kube-api-access-b2ml9\") pod \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") "
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888216 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-log-httpd\") pod \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") "
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888239 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-run-httpd\") pod \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") "
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888259 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-config-data\") pod \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") "
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888298 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-sg-core-conf-yaml\") pod \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") "
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888348 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-scripts\") pod \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\" (UID: \"9ff4f8db-52f0-4281-aa39-dc2d06034aeb\") "
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888946 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9ff4f8db-52f0-4281-aa39-dc2d06034aeb" (UID: "9ff4f8db-52f0-4281-aa39-dc2d06034aeb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.888999 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9ff4f8db-52f0-4281-aa39-dc2d06034aeb" (UID: "9ff4f8db-52f0-4281-aa39-dc2d06034aeb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.901345 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-kube-api-access-b2ml9" (OuterVolumeSpecName: "kube-api-access-b2ml9") pod "9ff4f8db-52f0-4281-aa39-dc2d06034aeb" (UID: "9ff4f8db-52f0-4281-aa39-dc2d06034aeb"). InnerVolumeSpecName "kube-api-access-b2ml9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.902024 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-scripts" (OuterVolumeSpecName: "scripts") pod "9ff4f8db-52f0-4281-aa39-dc2d06034aeb" (UID: "9ff4f8db-52f0-4281-aa39-dc2d06034aeb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.930163 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9ff4f8db-52f0-4281-aa39-dc2d06034aeb" (UID: "9ff4f8db-52f0-4281-aa39-dc2d06034aeb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.976747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ff4f8db-52f0-4281-aa39-dc2d06034aeb" (UID: "9ff4f8db-52f0-4281-aa39-dc2d06034aeb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.990670 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.990711 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.990727 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2ml9\" (UniqueName: \"kubernetes.io/projected/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-kube-api-access-b2ml9\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.990740 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.990752 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.990763 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:35 crc kubenswrapper[4747]: I1202 17:05:35.995246 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-config-data" (OuterVolumeSpecName: "config-data") pod "9ff4f8db-52f0-4281-aa39-dc2d06034aeb" (UID: "9ff4f8db-52f0-4281-aa39-dc2d06034aeb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.092513 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ff4f8db-52f0-4281-aa39-dc2d06034aeb-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.166725 4747 generic.go:334] "Generic (PLEG): container finished" podID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerID="c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25" exitCode=0
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.166774 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerDied","Data":"c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25"}
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.166804 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9ff4f8db-52f0-4281-aa39-dc2d06034aeb","Type":"ContainerDied","Data":"816667123ba12db23479fb37f41330106ff61183c4740ffff9e8b83b4e3a7a6c"}
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.166821 4747 scope.go:117] "RemoveContainer" containerID="c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.166839 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.209472 4747 scope.go:117] "RemoveContainer" containerID="011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.213007 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.220874 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.238042 4747 scope.go:117] "RemoveContainer" containerID="10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.238497 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.239021 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="proxy-httpd"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239043 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="proxy-httpd"
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.239072 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-notification-agent"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239081 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-notification-agent"
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.239098 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-central-agent"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239108 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-central-agent"
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.239122 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="sg-core"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239130 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="sg-core"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239354 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-notification-agent"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239374 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="sg-core"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239402 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="ceilometer-central-agent"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.239420 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" containerName="proxy-httpd"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.241444 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.247704 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.247846 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.258276 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.269178 4747 scope.go:117] "RemoveContainer" containerID="c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.296527 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-config-data\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.296608 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvzms\" (UniqueName: \"kubernetes.io/projected/6d778f77-b54a-4ef2-b446-baa6ec3147ca-kube-api-access-dvzms\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.296726 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.296811 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-scripts\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.296879 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-run-httpd\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.297034 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.297101 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-log-httpd\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.298516 4747 scope.go:117] "RemoveContainer" containerID="c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76"
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.299203 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76\": container with ID starting with c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76 not found: ID does not exist" containerID="c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.299233 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76"} err="failed to get container status \"c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76\": rpc error: code = NotFound desc = could not find container \"c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76\": container with ID starting with c62d1f63325253af931e98a5aa254aeee6f7b877a07a5c16d79e7a266eaf7f76 not found: ID does not exist"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.299254 4747 scope.go:117] "RemoveContainer" containerID="011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71"
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.299576 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71\": container with ID starting with 011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71 not found: ID does not exist" containerID="011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.299608 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71"} err="failed to get container status \"011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71\": rpc error: code = NotFound desc = could not find container \"011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71\": container with ID starting with 011849020f6dd70259ab1a3c14b45edc4e1f93e6b44f007cb9dcd24c8dd58b71 not found: ID does not exist"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.299657 4747 scope.go:117] "RemoveContainer" containerID="10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479"
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.299953 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479\": container with ID starting with 10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479 not found: ID does not exist" containerID="10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.299980 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479"} err="failed to get container status \"10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479\": rpc error: code = NotFound desc = could not find container \"10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479\": container with ID starting with 10ba407975e5edfc61056d8b6c8362f9b2b66be36e9da2541a30e965441ab479 not found: ID does not exist"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.300001 4747 scope.go:117] "RemoveContainer" containerID="c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25"
Dec 02 17:05:36 crc kubenswrapper[4747]: E1202 17:05:36.300228 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25\": container with ID starting with c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25 not found: ID does not exist" containerID="c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.300253 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25"} err="failed to get container status \"c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25\": rpc error: code = NotFound desc = could not find container \"c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25\": container with ID starting with c4d065fc57bfb37bf738f1f1593cd8f6ddd1231a319fe635daa4a54bf5adae25 not found: ID does not exist"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.399135 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-config-data\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.399220 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvzms\" (UniqueName: \"kubernetes.io/projected/6d778f77-b54a-4ef2-b446-baa6ec3147ca-kube-api-access-dvzms\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.399261 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.399298 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-scripts\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.399353 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-run-httpd\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.399408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.399466 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-log-httpd\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.400018 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-run-httpd\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.400087 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-log-httpd\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.405420 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.406069 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-scripts\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.406081 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.406901 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-config-data\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.425709 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvzms\" (UniqueName: \"kubernetes.io/projected/6d778f77-b54a-4ef2-b446-baa6ec3147ca-kube-api-access-dvzms\") pod \"ceilometer-0\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " pod="openstack/ceilometer-0"
Dec 02 17:05:36 crc kubenswrapper[4747]: I1202 17:05:36.562785 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 02 17:05:37 crc kubenswrapper[4747]: W1202 17:05:37.035794 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d778f77_b54a_4ef2_b446_baa6ec3147ca.slice/crio-ece620da383ffc4bf68703c60a3c11d19948d5091b839173c23c0d6dffa18f38 WatchSource:0}: Error finding container ece620da383ffc4bf68703c60a3c11d19948d5091b839173c23c0d6dffa18f38: Status 404 returned error can't find the container with id ece620da383ffc4bf68703c60a3c11d19948d5091b839173c23c0d6dffa18f38
Dec 02 17:05:37 crc kubenswrapper[4747]: I1202 17:05:37.039621 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 02 17:05:37 crc kubenswrapper[4747]: I1202 17:05:37.177794 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerStarted","Data":"ece620da383ffc4bf68703c60a3c11d19948d5091b839173c23c0d6dffa18f38"}
Dec 02 17:05:37 crc kubenswrapper[4747]: I1202 17:05:37.799814 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ff4f8db-52f0-4281-aa39-dc2d06034aeb" path="/var/lib/kubelet/pods/9ff4f8db-52f0-4281-aa39-dc2d06034aeb/volumes"
Dec 02 17:05:38 crc kubenswrapper[4747]: I1202 17:05:38.193385 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerStarted","Data":"6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988"}
Dec 02 17:05:39 crc kubenswrapper[4747]: I1202 17:05:39.208833 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerStarted","Data":"3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7"}
Dec 02 17:05:39 crc kubenswrapper[4747]: I1202 17:05:39.213617 4747 generic.go:334] "Generic (PLEG): container finished" podID="343064c0-a3ec-4048-93a1-e76e9f648b17" containerID="4fcdd09b3c35225b33d4e07afa5133d3662e8af492e510d5ad1b047eee63dc58" exitCode=0
Dec 02 17:05:39 crc kubenswrapper[4747]: I1202 17:05:39.213679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rtm6n" event={"ID":"343064c0-a3ec-4048-93a1-e76e9f648b17","Type":"ContainerDied","Data":"4fcdd09b3c35225b33d4e07afa5133d3662e8af492e510d5ad1b047eee63dc58"}
Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.226267 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerStarted","Data":"9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece"}
Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.832808 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rtm6n" Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.953289 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h58st\" (UniqueName: \"kubernetes.io/projected/343064c0-a3ec-4048-93a1-e76e9f648b17-kube-api-access-h58st\") pod \"343064c0-a3ec-4048-93a1-e76e9f648b17\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.953405 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-config-data\") pod \"343064c0-a3ec-4048-93a1-e76e9f648b17\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.953500 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-scripts\") pod \"343064c0-a3ec-4048-93a1-e76e9f648b17\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.953580 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-combined-ca-bundle\") pod \"343064c0-a3ec-4048-93a1-e76e9f648b17\" (UID: \"343064c0-a3ec-4048-93a1-e76e9f648b17\") " Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.960340 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/343064c0-a3ec-4048-93a1-e76e9f648b17-kube-api-access-h58st" (OuterVolumeSpecName: "kube-api-access-h58st") pod "343064c0-a3ec-4048-93a1-e76e9f648b17" (UID: "343064c0-a3ec-4048-93a1-e76e9f648b17"). InnerVolumeSpecName "kube-api-access-h58st". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.960827 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-scripts" (OuterVolumeSpecName: "scripts") pod "343064c0-a3ec-4048-93a1-e76e9f648b17" (UID: "343064c0-a3ec-4048-93a1-e76e9f648b17"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.984689 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-config-data" (OuterVolumeSpecName: "config-data") pod "343064c0-a3ec-4048-93a1-e76e9f648b17" (UID: "343064c0-a3ec-4048-93a1-e76e9f648b17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:05:40 crc kubenswrapper[4747]: I1202 17:05:40.984969 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "343064c0-a3ec-4048-93a1-e76e9f648b17" (UID: "343064c0-a3ec-4048-93a1-e76e9f648b17"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.057078 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h58st\" (UniqueName: \"kubernetes.io/projected/343064c0-a3ec-4048-93a1-e76e9f648b17-kube-api-access-h58st\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.057130 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.057157 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.057169 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/343064c0-a3ec-4048-93a1-e76e9f648b17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.333198 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-rtm6n" event={"ID":"343064c0-a3ec-4048-93a1-e76e9f648b17","Type":"ContainerDied","Data":"55fe356f25f65177e89fd11c768407072f2a411d04d8ca4c1de0e5c832e492fe"} Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.333267 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55fe356f25f65177e89fd11c768407072f2a411d04d8ca4c1de0e5c832e492fe" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.333349 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-rtm6n" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.502447 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 17:05:41 crc kubenswrapper[4747]: E1202 17:05:41.502978 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="343064c0-a3ec-4048-93a1-e76e9f648b17" containerName="nova-cell0-conductor-db-sync" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.502999 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="343064c0-a3ec-4048-93a1-e76e9f648b17" containerName="nova-cell0-conductor-db-sync" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.503228 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="343064c0-a3ec-4048-93a1-e76e9f648b17" containerName="nova-cell0-conductor-db-sync" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.503856 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.507317 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.511192 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-r56xf" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.512817 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3523196-7b69-4247-b9fa-6b83ed18926a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.513005 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3523196-7b69-4247-b9fa-6b83ed18926a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.513131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfktw\" (UniqueName: \"kubernetes.io/projected/a3523196-7b69-4247-b9fa-6b83ed18926a-kube-api-access-vfktw\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.570986 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.616793 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3523196-7b69-4247-b9fa-6b83ed18926a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.616875 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3523196-7b69-4247-b9fa-6b83ed18926a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.616906 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfktw\" (UniqueName: \"kubernetes.io/projected/a3523196-7b69-4247-b9fa-6b83ed18926a-kube-api-access-vfktw\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.624052 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3523196-7b69-4247-b9fa-6b83ed18926a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.624986 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3523196-7b69-4247-b9fa-6b83ed18926a-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.636628 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfktw\" (UniqueName: \"kubernetes.io/projected/a3523196-7b69-4247-b9fa-6b83ed18926a-kube-api-access-vfktw\") pod \"nova-cell0-conductor-0\" (UID: \"a3523196-7b69-4247-b9fa-6b83ed18926a\") " pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:41 crc kubenswrapper[4747]: I1202 17:05:41.829291 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:42 crc kubenswrapper[4747]: I1202 17:05:42.351472 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerStarted","Data":"c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67"} Dec 02 17:05:42 crc kubenswrapper[4747]: I1202 17:05:42.351822 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:05:42 crc kubenswrapper[4747]: I1202 17:05:42.388651 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.7584709090000001 podStartE2EDuration="6.388623065s" podCreationTimestamp="2025-12-02 17:05:36 +0000 UTC" firstStartedPulling="2025-12-02 17:05:37.039018884 +0000 UTC m=+1367.565907643" lastFinishedPulling="2025-12-02 17:05:41.66917105 +0000 UTC m=+1372.196059799" observedRunningTime="2025-12-02 17:05:42.383032386 +0000 UTC m=+1372.909921145" watchObservedRunningTime="2025-12-02 17:05:42.388623065 +0000 UTC m=+1372.915511814" Dec 02 17:05:42 crc kubenswrapper[4747]: I1202 17:05:42.573978 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 17:05:42 crc kubenswrapper[4747]: W1202 17:05:42.578702 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3523196_7b69_4247_b9fa_6b83ed18926a.slice/crio-520e134372da51fbb28001e674d0e0b8672e0719732358375bc07cb32ebc4bc6 WatchSource:0}: Error finding container 520e134372da51fbb28001e674d0e0b8672e0719732358375bc07cb32ebc4bc6: Status 404 returned error can't find the container with id 520e134372da51fbb28001e674d0e0b8672e0719732358375bc07cb32ebc4bc6 Dec 02 17:05:43 crc kubenswrapper[4747]: I1202 17:05:43.366223 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a3523196-7b69-4247-b9fa-6b83ed18926a","Type":"ContainerStarted","Data":"c2c8df5842cd6ffadaceeb4a935f17fb403e06bf112074cdfdb62cd95672db46"} Dec 02 17:05:43 crc kubenswrapper[4747]: I1202 17:05:43.366283 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a3523196-7b69-4247-b9fa-6b83ed18926a","Type":"ContainerStarted","Data":"520e134372da51fbb28001e674d0e0b8672e0719732358375bc07cb32ebc4bc6"} Dec 02 17:05:44 crc kubenswrapper[4747]: I1202 17:05:44.375283 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:51 crc kubenswrapper[4747]: I1202 17:05:51.857961 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 02 17:05:51 crc kubenswrapper[4747]: I1202 17:05:51.880586 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell0-conductor-0" podStartSLOduration=10.880557051 podStartE2EDuration="10.880557051s" podCreationTimestamp="2025-12-02 17:05:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:05:43.397516512 +0000 UTC m=+1373.924405261" watchObservedRunningTime="2025-12-02 17:05:51.880557051 +0000 UTC m=+1382.407445810" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.330259 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-djdrn"] Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.331808 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.335054 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.335649 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.351474 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-djdrn"] Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.414546 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.414759 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsq99\" (UniqueName: \"kubernetes.io/projected/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-kube-api-access-dsq99\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.414857 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-scripts\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.415061 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-config-data\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.518245 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-config-data\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.518441 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.518504 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsq99\" (UniqueName: \"kubernetes.io/projected/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-kube-api-access-dsq99\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.518548 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-scripts\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.527372 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.529994 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-config-data\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.549186 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-scripts\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.578699 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsq99\" (UniqueName: \"kubernetes.io/projected/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-kube-api-access-dsq99\") pod \"nova-cell0-cell-mapping-djdrn\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") " pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.595872 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.597169 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.600614 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.621488 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.675771 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-djdrn" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.715289 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.717299 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.723733 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8r57\" (UniqueName: \"kubernetes.io/projected/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-kube-api-access-w8r57\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.723794 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.723856 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-config-data\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.745723 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.760970 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.860769 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-config-data\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.864724 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8r57\" (UniqueName: \"kubernetes.io/projected/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-kube-api-access-w8r57\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.864927 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.877089 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-config-data\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.885679 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.894284 4747 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.912788 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8r57\" (UniqueName: \"kubernetes.io/projected/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-kube-api-access-w8r57\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.913526 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.950927 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968189 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968265 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-logs\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968297 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-config-data\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968319 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-config-data\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968352 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-774rp\" (UniqueName: \"kubernetes.io/projected/4c922e9b-6e52-4b03-977a-224b26ddeee5-kube-api-access-774rp\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968371 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glg54\" (UniqueName: \"kubernetes.io/projected/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-kube-api-access-glg54\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968393 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c922e9b-6e52-4b03-977a-224b26ddeee5-logs\") pod \"nova-api-0\" (UID: 
\"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.968414 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.979697 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:05:52 crc kubenswrapper[4747]: I1202 17:05:52.988656 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.000851 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.016393 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.017305 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.020022 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.026743 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-44bgt"] Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.031305 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.048467 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-44bgt"] Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z52j\" (UniqueName: \"kubernetes.io/projected/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-kube-api-access-5z52j\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070314 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-logs\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070335 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-config\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070419 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: 
I1202 17:05:53.070734 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-config-data\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070763 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-config-data\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070800 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070827 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-774rp\" (UniqueName: \"kubernetes.io/projected/4c922e9b-6e52-4b03-977a-224b26ddeee5-kube-api-access-774rp\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070848 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glg54\" (UniqueName: \"kubernetes.io/projected/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-kube-api-access-glg54\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070866 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070882 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070901 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c922e9b-6e52-4b03-977a-224b26ddeee5-logs\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070932 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070965 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdjxd\" (UniqueName: 
\"kubernetes.io/projected/3760cc83-1810-4268-a539-4d1bc6bd358c-kube-api-access-fdjxd\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.070999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.071037 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.071058 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.071735 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c922e9b-6e52-4b03-977a-224b26ddeee5-logs\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.073451 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-logs\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.078842 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.084446 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-config-data\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.084793 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-config-data\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.086136 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.099399 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-glg54\" (UniqueName: \"kubernetes.io/projected/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-kube-api-access-glg54\") pod \"nova-metadata-0\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") " pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.107040 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-774rp\" (UniqueName: \"kubernetes.io/projected/4c922e9b-6e52-4b03-977a-224b26ddeee5-kube-api-access-774rp\") pod \"nova-api-0\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177122 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z52j\" (UniqueName: \"kubernetes.io/projected/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-kube-api-access-5z52j\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177229 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-config\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177264 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177360 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177421 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177443 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177527 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdjxd\" (UniqueName: \"kubernetes.io/projected/3760cc83-1810-4268-a539-4d1bc6bd358c-kube-api-access-fdjxd\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177583 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.177668 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.178548 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.178666 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.182358 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.182425 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.189562 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-config\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.190777 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.190876 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.201418 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z52j\" (UniqueName: \"kubernetes.io/projected/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-kube-api-access-5z52j\") pod \"dnsmasq-dns-845d6d6f59-44bgt\" (UID: 
\"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.203771 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdjxd\" (UniqueName: \"kubernetes.io/projected/3760cc83-1810-4268-a539-4d1bc6bd358c-kube-api-access-fdjxd\") pod \"nova-cell1-novncproxy-0\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.276547 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.296886 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.362726 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.385877 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.474038 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-djdrn"] Dec 02 17:05:53 crc kubenswrapper[4747]: W1202 17:05:53.805039 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44e2aaf8_b5e0_4d63_95b1_8aac22e19a36.slice/crio-11290fd14adadb225f4eb2dc42731e096b25befada6f9bee67fce4892f31b12e WatchSource:0}: Error finding container 11290fd14adadb225f4eb2dc42731e096b25befada6f9bee67fce4892f31b12e: Status 404 returned error can't find the container with id 11290fd14adadb225f4eb2dc42731e096b25befada6f9bee67fce4892f31b12e Dec 02 17:05:53 crc kubenswrapper[4747]: I1202 17:05:53.811951 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:53.999718 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ktcxd"] Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.002449 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.007203 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.007368 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.018521 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ktcxd"]
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.112582 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-config-data\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.113326 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khh2n\" (UniqueName: \"kubernetes.io/projected/5ccc4c22-9c49-461c-9c53-404bdc932adc-kube-api-access-khh2n\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.113402 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-scripts\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.113910 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.120033 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 02 17:05:54 crc kubenswrapper[4747]: W1202 17:05:54.124906 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c922e9b_6e52_4b03_977a_224b26ddeee5.slice/crio-c9c8f8aea5f31590cb697174cd1b7f61a8727b417af726e03456bd1feffbef48 WatchSource:0}: Error finding container c9c8f8aea5f31590cb697174cd1b7f61a8727b417af726e03456bd1feffbef48: Status 404 returned error can't find the container with id c9c8f8aea5f31590cb697174cd1b7f61a8727b417af726e03456bd1feffbef48
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.219072 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-scripts\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.219400 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.219553 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-config-data\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.220788 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khh2n\" (UniqueName: \"kubernetes.io/projected/5ccc4c22-9c49-461c-9c53-404bdc932adc-kube-api-access-khh2n\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.232049 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-config-data\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.232742 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.250675 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khh2n\" (UniqueName: \"kubernetes.io/projected/5ccc4c22-9c49-461c-9c53-404bdc932adc-kube-api-access-khh2n\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.251535 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-scripts\") pod \"nova-cell1-conductor-db-sync-ktcxd\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") " pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.353522 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-44bgt"]
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.355307 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.393572 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.546851 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.658504 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" event={"ID":"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738","Type":"ContainerStarted","Data":"e2bd3a21828e49a0c3f8a2f1b771b9b7378c5b1a4231f5c1a4f7459b126e7061"}
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.660548 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13","Type":"ContainerStarted","Data":"0216af6977a07a1432c94d19bfd76b28446365ea6330c7fdc0208df40d9497c9"}
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.664161 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36","Type":"ContainerStarted","Data":"11290fd14adadb225f4eb2dc42731e096b25befada6f9bee67fce4892f31b12e"}
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.666717 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c922e9b-6e52-4b03-977a-224b26ddeee5","Type":"ContainerStarted","Data":"c9c8f8aea5f31590cb697174cd1b7f61a8727b417af726e03456bd1feffbef48"}
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.673544 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-djdrn" event={"ID":"d56a0e01-8f6b-409a-96c1-5b67f7fc3528","Type":"ContainerStarted","Data":"b2a319184355a79fb68f1ff9f62c6fbeed8b9acccb109dcb6d7f12630345fe7b"}
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.673636 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-djdrn" event={"ID":"d56a0e01-8f6b-409a-96c1-5b67f7fc3528","Type":"ContainerStarted","Data":"3737a2d57cc98db58cfaaa3d7efc80908da3e1f17534d8cab821ddf3c7460e06"}
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.688333 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3760cc83-1810-4268-a539-4d1bc6bd358c","Type":"ContainerStarted","Data":"3668a91d7ec67a677dfa904323b19450cadfcd22d3eb3fa774df614ea71fb750"}
Dec 02 17:05:54 crc kubenswrapper[4747]: I1202 17:05:54.715263 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-djdrn" podStartSLOduration=2.715232673 podStartE2EDuration="2.715232673s" podCreationTimestamp="2025-12-02 17:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:05:54.714946095 +0000 UTC m=+1385.241834844" watchObservedRunningTime="2025-12-02 17:05:54.715232673 +0000 UTC m=+1385.242121432"
Dec 02 17:05:55 crc kubenswrapper[4747]: I1202 17:05:55.025612 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ktcxd"]
Dec 02 17:05:55 crc kubenswrapper[4747]: I1202 17:05:55.730167 4747 generic.go:334] "Generic (PLEG): container finished" podID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerID="f194db040efb4e2049154dfe398a736516082470ebf13c519a208632d7675976" exitCode=0
Dec 02 17:05:55 crc kubenswrapper[4747]: I1202 17:05:55.730765 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" event={"ID":"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738","Type":"ContainerDied","Data":"f194db040efb4e2049154dfe398a736516082470ebf13c519a208632d7675976"}
Dec 02 17:05:55 crc kubenswrapper[4747]: I1202 17:05:55.734618 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ktcxd" event={"ID":"5ccc4c22-9c49-461c-9c53-404bdc932adc","Type":"ContainerStarted","Data":"f7f238f705e2364781e309764dd179ec75f3f55e29db68c35c61bdf58aece489"}
Dec 02 17:05:55 crc kubenswrapper[4747]: I1202 17:05:55.734695 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ktcxd" event={"ID":"5ccc4c22-9c49-461c-9c53-404bdc932adc","Type":"ContainerStarted","Data":"6d5aaa98c298ecaafba1b88ab7567a9beb808fd9b5d45674228c782698b30465"}
Dec 02 17:05:55 crc kubenswrapper[4747]: I1202 17:05:55.797884 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-ktcxd" podStartSLOduration=2.7978531589999998 podStartE2EDuration="2.797853159s" podCreationTimestamp="2025-12-02 17:05:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:05:55.773220578 +0000 UTC m=+1386.300109327" watchObservedRunningTime="2025-12-02 17:05:55.797853159 +0000 UTC m=+1386.324741908"
Dec 02 17:05:56 crc kubenswrapper[4747]: I1202 17:05:56.961804 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:05:56 crc kubenswrapper[4747]: I1202 17:05:56.981996 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 02 17:05:57 crc kubenswrapper[4747]: I1202 17:05:57.780417 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="3760cc83-1810-4268-a539-4d1bc6bd358c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd" gracePeriod=30
Dec 02 17:05:57 crc kubenswrapper[4747]: I1202 17:05:57.807542 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.01893403 podStartE2EDuration="5.807518571s" podCreationTimestamp="2025-12-02 17:05:52 +0000 UTC" firstStartedPulling="2025-12-02 17:05:54.558356836 +0000 UTC m=+1385.085245585" lastFinishedPulling="2025-12-02 17:05:57.346941377 +0000 UTC m=+1387.873830126" observedRunningTime="2025-12-02 17:05:57.802448457 +0000 UTC m=+1388.329337206" watchObservedRunningTime="2025-12-02 17:05:57.807518571 +0000 UTC m=+1388.334407320"
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.363072 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.796292 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13","Type":"ContainerStarted","Data":"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"}
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.796349 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13","Type":"ContainerStarted","Data":"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"}
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.796473 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-log" containerID="cri-o://2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230" gracePeriod=30
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.796538 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-metadata" containerID="cri-o://38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770" gracePeriod=30
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.800458 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36","Type":"ContainerStarted","Data":"18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b"}
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.808317 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c922e9b-6e52-4b03-977a-224b26ddeee5","Type":"ContainerStarted","Data":"5262e4c331bf817e286e4b4699fdab1d653099813f8a1ff6148420e501ddd455"}
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.808382 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c922e9b-6e52-4b03-977a-224b26ddeee5","Type":"ContainerStarted","Data":"269746c0e6d1c3d8b798c845af304a0893438ed83de31d144cc9c085ccea8f6c"}
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.811411 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3760cc83-1810-4268-a539-4d1bc6bd358c","Type":"ContainerStarted","Data":"153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd"}
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.814430 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" event={"ID":"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738","Type":"ContainerStarted","Data":"88c3ca76ceccfbd890fb3653fffe5ffb95ab445de2f87eca618b9259ccb9267b"}
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.814635 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt"
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.836471 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.882663893 podStartE2EDuration="6.836439227s" podCreationTimestamp="2025-12-02 17:05:52 +0000 UTC" firstStartedPulling="2025-12-02 17:05:54.399929616 +0000 UTC m=+1384.926818365" lastFinishedPulling="2025-12-02 17:05:57.35370495 +0000 UTC m=+1387.880593699" observedRunningTime="2025-12-02 17:05:58.819206467 +0000 UTC m=+1389.346095226" watchObservedRunningTime="2025-12-02 17:05:58.836439227 +0000 UTC m=+1389.363327976"
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.849961 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.595389074 podStartE2EDuration="6.849932792s" podCreationTimestamp="2025-12-02 17:05:52 +0000 UTC" firstStartedPulling="2025-12-02 17:05:54.129363942 +0000 UTC m=+1384.656252691" lastFinishedPulling="2025-12-02 17:05:57.38390766 +0000 UTC m=+1387.910796409" observedRunningTime="2025-12-02 17:05:58.84215237 +0000 UTC m=+1389.369041119" watchObservedRunningTime="2025-12-02 17:05:58.849932792 +0000 UTC m=+1389.376821541"
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.884270 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.428917154 podStartE2EDuration="6.884241628s" podCreationTimestamp="2025-12-02 17:05:52 +0000 UTC" firstStartedPulling="2025-12-02 17:05:53.82441483 +0000 UTC m=+1384.351303589" lastFinishedPulling="2025-12-02 17:05:57.279739314 +0000 UTC m=+1387.806628063" observedRunningTime="2025-12-02 17:05:58.870566689 +0000 UTC m=+1389.397455438" watchObservedRunningTime="2025-12-02 17:05:58.884241628 +0000 UTC m=+1389.411130377"
Dec 02 17:05:58 crc kubenswrapper[4747]: I1202 17:05:58.902328 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" podStartSLOduration=6.902304533 podStartE2EDuration="6.902304533s" podCreationTimestamp="2025-12-02 17:05:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:05:58.89482792 +0000 UTC m=+1389.421716669" watchObservedRunningTime="2025-12-02 17:05:58.902304533 +0000 UTC m=+1389.429193282"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.466909 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.572017 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-config-data\") pod \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") "
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.572130 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glg54\" (UniqueName: \"kubernetes.io/projected/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-kube-api-access-glg54\") pod \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") "
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.572157 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-combined-ca-bundle\") pod \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") "
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.573433 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-logs\") pod \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\" (UID: \"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13\") "
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.573818 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-logs" (OuterVolumeSpecName: "logs") pod "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" (UID: "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.574183 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-logs\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.578224 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-kube-api-access-glg54" (OuterVolumeSpecName: "kube-api-access-glg54") pod "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" (UID: "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13"). InnerVolumeSpecName "kube-api-access-glg54". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.622047 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" (UID: "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.642358 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-config-data" (OuterVolumeSpecName: "config-data") pod "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" (UID: "733ca6fe-dc97-43ee-a95c-e7ed2bca9b13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.676015 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glg54\" (UniqueName: \"kubernetes.io/projected/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-kube-api-access-glg54\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.676069 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.676087 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.827120 4747 generic.go:334] "Generic (PLEG): container finished" podID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerID="38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770" exitCode=0
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.827163 4747 generic.go:334] "Generic (PLEG): container finished" podID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerID="2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230" exitCode=143
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.827218 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.827328 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13","Type":"ContainerDied","Data":"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"}
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.827372 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13","Type":"ContainerDied","Data":"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"}
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.827386 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"733ca6fe-dc97-43ee-a95c-e7ed2bca9b13","Type":"ContainerDied","Data":"0216af6977a07a1432c94d19bfd76b28446365ea6330c7fdc0208df40d9497c9"}
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.827409 4747 scope.go:117] "RemoveContainer" containerID="38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.860569 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.887278 4747 scope.go:117] "RemoveContainer" containerID="2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.895056 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.907388 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:05:59 crc kubenswrapper[4747]: E1202 17:05:59.907886 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-metadata"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.907937 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-metadata"
Dec 02 17:05:59 crc kubenswrapper[4747]: E1202 17:05:59.907967 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-log"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.907977 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-log"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.908252 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-log"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.908288 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" containerName="nova-metadata-metadata"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.909635 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.915285 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.915410 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.915692 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.923240 4747 scope.go:117] "RemoveContainer" containerID="38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"
Dec 02 17:05:59 crc kubenswrapper[4747]: E1202 17:05:59.924011 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770\": container with ID starting with 38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770 not found: ID does not exist" containerID="38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.924070 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"} err="failed to get container status \"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770\": rpc error: code = NotFound desc = could not find container \"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770\": container with ID starting with 38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770 not found: ID does not exist"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.924107 4747 scope.go:117] "RemoveContainer" containerID="2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"
Dec 02 17:05:59 crc kubenswrapper[4747]: E1202 17:05:59.924675 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230\": container with ID starting with 2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230 not found: ID does not exist" containerID="2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.924705 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"} err="failed to get container status \"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230\": rpc error: code = NotFound desc = could not find container \"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230\": container with ID starting with 2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230 not found: ID does not exist"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.924724 4747 scope.go:117] "RemoveContainer" containerID="38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.930070 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770"} err="failed to get container status \"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770\": rpc error: code = NotFound desc = could not find container \"38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770\": container with ID starting with 38ad304186fa7e96958e7654aa5541a31d84e19a4790df32decd2c1becf23770 not found: ID does not exist"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.930181 4747 scope.go:117] "RemoveContainer" containerID="2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.930804 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230"} err="failed to get container status \"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230\": rpc error: code = NotFound desc = could not find container \"2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230\": container with ID starting with 2b0535a5305f7c3da6363815e78eb4301a2e57e13dcbc953603aa17c93344230 not found: ID does not exist"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.981071 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-logs\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.981230 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.981397 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zx52v\" (UniqueName: \"kubernetes.io/projected/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-kube-api-access-zx52v\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.981435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:05:59 crc kubenswrapper[4747]: I1202 17:05:59.981459 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-config-data\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.083894 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zx52v\" (UniqueName: \"kubernetes.io/projected/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-kube-api-access-zx52v\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.084006 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.084053 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-config-data\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.084109 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-logs\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.084253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.084764 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-logs\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.089897 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.103720 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.104384 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-config-data\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.114617 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zx52v\" (UniqueName: \"kubernetes.io/projected/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-kube-api-access-zx52v\") pod \"nova-metadata-0\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.248890 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.775516 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:06:00 crc kubenswrapper[4747]: W1202 17:06:00.780124 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06ff6ee6_40c3_4d2c_8052_92ca21e9b87e.slice/crio-c47dca3a462fa26f521cbdb3ebb9ba4a78611053741862b62fad5edff403fd7f WatchSource:0}: Error finding container c47dca3a462fa26f521cbdb3ebb9ba4a78611053741862b62fad5edff403fd7f: Status 404 returned error can't find the container with id c47dca3a462fa26f521cbdb3ebb9ba4a78611053741862b62fad5edff403fd7f
Dec 02 17:06:00 crc kubenswrapper[4747]: I1202 17:06:00.845558 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e","Type":"ContainerStarted","Data":"c47dca3a462fa26f521cbdb3ebb9ba4a78611053741862b62fad5edff403fd7f"}
Dec 02 17:06:01 crc kubenswrapper[4747]: I1202 17:06:01.774931 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="733ca6fe-dc97-43ee-a95c-e7ed2bca9b13" path="/var/lib/kubelet/pods/733ca6fe-dc97-43ee-a95c-e7ed2bca9b13/volumes"
Dec 02 17:06:01 crc kubenswrapper[4747]: I1202 17:06:01.856961 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e","Type":"ContainerStarted","Data":"cc5ec97ccae82f79c124b56e826bfa1a7c5ca695d4f883851d5ff76aa4e538b0"}
Dec 02 17:06:01 crc kubenswrapper[4747]: I1202 17:06:01.857038 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e","Type":"ContainerStarted","Data":"6df08aa63f0cf42e0f351792b36f9c1daf0e9a50b602243490f4cb7973b89c11"}
Dec 02 17:06:01 crc kubenswrapper[4747]: I1202 17:06:01.886556 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.886499922 podStartE2EDuration="2.886499922s" podCreationTimestamp="2025-12-02 17:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:01.876060865 +0000 UTC m=+1392.402949634" watchObservedRunningTime="2025-12-02 17:06:01.886499922 +0000 UTC m=+1392.413388671"
Dec 02 17:06:02 crc kubenswrapper[4747]: I1202 17:06:02.980998 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 02 17:06:02 crc kubenswrapper[4747]: I1202 17:06:02.981071 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.013738 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.277397 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.277481 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.388673 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt"
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.456547 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-lqspp"]
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.456901 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" podUID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerName="dnsmasq-dns" containerID="cri-o://6486edc5f982a0b8b8e8a64c9a7addb1d98d3309aabf7682782ec81ad54cb04e" gracePeriod=10
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.882147 4747 generic.go:334] "Generic (PLEG): container finished" podID="5ccc4c22-9c49-461c-9c53-404bdc932adc" containerID="f7f238f705e2364781e309764dd179ec75f3f55e29db68c35c61bdf58aece489" exitCode=0
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.882523 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ktcxd" event={"ID":"5ccc4c22-9c49-461c-9c53-404bdc932adc","Type":"ContainerDied","Data":"f7f238f705e2364781e309764dd179ec75f3f55e29db68c35c61bdf58aece489"}
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.885099 4747 generic.go:334] "Generic (PLEG): container finished" podID="d56a0e01-8f6b-409a-96c1-5b67f7fc3528" containerID="b2a319184355a79fb68f1ff9f62c6fbeed8b9acccb109dcb6d7f12630345fe7b" exitCode=0
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.885185 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-djdrn" event={"ID":"d56a0e01-8f6b-409a-96c1-5b67f7fc3528","Type":"ContainerDied","Data":"b2a319184355a79fb68f1ff9f62c6fbeed8b9acccb109dcb6d7f12630345fe7b"}
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.888271 4747 generic.go:334] "Generic (PLEG): container finished" podID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerID="6486edc5f982a0b8b8e8a64c9a7addb1d98d3309aabf7682782ec81ad54cb04e" exitCode=0
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.888354 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" event={"ID":"2ec58fad-2ebd-4536-ae98-1f4537cbbffd","Type":"ContainerDied","Data":"6486edc5f982a0b8b8e8a64c9a7addb1d98d3309aabf7682782ec81ad54cb04e"}
Dec 02 17:06:03 crc kubenswrapper[4747]: I1202 17:06:03.944118 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.033149 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.171341 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-config\") pod \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") "
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.171565 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-nb\") pod \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") "
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.171637 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbf5q\" (UniqueName: \"kubernetes.io/projected/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-kube-api-access-rbf5q\") pod \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") "
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.171691 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-svc\") pod \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") "
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.171725 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-swift-storage-0\") pod \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") "
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.171762 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-sb\") pod \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\" (UID: \"2ec58fad-2ebd-4536-ae98-1f4537cbbffd\") "
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.180420 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-kube-api-access-rbf5q" (OuterVolumeSpecName: "kube-api-access-rbf5q") pod "2ec58fad-2ebd-4536-ae98-1f4537cbbffd" (UID: "2ec58fad-2ebd-4536-ae98-1f4537cbbffd"). InnerVolumeSpecName "kube-api-access-rbf5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.224093 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2ec58fad-2ebd-4536-ae98-1f4537cbbffd" (UID: "2ec58fad-2ebd-4536-ae98-1f4537cbbffd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.231823 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2ec58fad-2ebd-4536-ae98-1f4537cbbffd" (UID: "2ec58fad-2ebd-4536-ae98-1f4537cbbffd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.241743 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2ec58fad-2ebd-4536-ae98-1f4537cbbffd" (UID: "2ec58fad-2ebd-4536-ae98-1f4537cbbffd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.246006 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2ec58fad-2ebd-4536-ae98-1f4537cbbffd" (UID: "2ec58fad-2ebd-4536-ae98-1f4537cbbffd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.253190 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-config" (OuterVolumeSpecName: "config") pod "2ec58fad-2ebd-4536-ae98-1f4537cbbffd" (UID: "2ec58fad-2ebd-4536-ae98-1f4537cbbffd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.275024 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.275087 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-config\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.275104 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.275121 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbf5q\" (UniqueName: \"kubernetes.io/projected/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-kube-api-access-rbf5q\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.275138 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.275149 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ec58fad-2ebd-4536-ae98-1f4537cbbffd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.359148 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.359884 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.903171 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-lqspp"
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.903366 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-lqspp" event={"ID":"2ec58fad-2ebd-4536-ae98-1f4537cbbffd","Type":"ContainerDied","Data":"8ba8b1277992974abf097659a8db2641d3c021cb0b2365efb0494f183cd31f63"}
Dec 02 17:06:04 crc kubenswrapper[4747]: I1202 17:06:04.903426 4747 scope.go:117] "RemoveContainer" containerID="6486edc5f982a0b8b8e8a64c9a7addb1d98d3309aabf7682782ec81ad54cb04e"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.052671 4747 scope.go:117] "RemoveContainer" containerID="b66aeb84d7ee2e9a28e52933f514956c9094fecaf40f053c2ce02277c8d795ca"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.068476 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-lqspp"]
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.137376 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-lqspp"]
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.249355 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.249418 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.425253 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.545028 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-djdrn"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.609370 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khh2n\" (UniqueName: \"kubernetes.io/projected/5ccc4c22-9c49-461c-9c53-404bdc932adc-kube-api-access-khh2n\") pod \"5ccc4c22-9c49-461c-9c53-404bdc932adc\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.609511 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-scripts\") pod \"5ccc4c22-9c49-461c-9c53-404bdc932adc\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.609569 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-config-data\") pod \"5ccc4c22-9c49-461c-9c53-404bdc932adc\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.609855 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-combined-ca-bundle\") pod \"5ccc4c22-9c49-461c-9c53-404bdc932adc\" (UID: \"5ccc4c22-9c49-461c-9c53-404bdc932adc\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.618205 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ccc4c22-9c49-461c-9c53-404bdc932adc-kube-api-access-khh2n" (OuterVolumeSpecName: "kube-api-access-khh2n") pod "5ccc4c22-9c49-461c-9c53-404bdc932adc" (UID: "5ccc4c22-9c49-461c-9c53-404bdc932adc"). InnerVolumeSpecName "kube-api-access-khh2n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.619035 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-scripts" (OuterVolumeSpecName: "scripts") pod "5ccc4c22-9c49-461c-9c53-404bdc932adc" (UID: "5ccc4c22-9c49-461c-9c53-404bdc932adc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.638582 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ccc4c22-9c49-461c-9c53-404bdc932adc" (UID: "5ccc4c22-9c49-461c-9c53-404bdc932adc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.639569 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-config-data" (OuterVolumeSpecName: "config-data") pod "5ccc4c22-9c49-461c-9c53-404bdc932adc" (UID: "5ccc4c22-9c49-461c-9c53-404bdc932adc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.711978 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-config-data\") pod \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.712099 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsq99\" (UniqueName: \"kubernetes.io/projected/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-kube-api-access-dsq99\") pod \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.712258 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-combined-ca-bundle\") pod \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.712342 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-scripts\") pod \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\" (UID: \"d56a0e01-8f6b-409a-96c1-5b67f7fc3528\") "
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.713148 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.713170 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khh2n\" (UniqueName: \"kubernetes.io/projected/5ccc4c22-9c49-461c-9c53-404bdc932adc-kube-api-access-khh2n\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.713190 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.713202 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ccc4c22-9c49-461c-9c53-404bdc932adc-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.715731 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-scripts" (OuterVolumeSpecName: "scripts") pod "d56a0e01-8f6b-409a-96c1-5b67f7fc3528" (UID: "d56a0e01-8f6b-409a-96c1-5b67f7fc3528"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.716378 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-kube-api-access-dsq99" (OuterVolumeSpecName: "kube-api-access-dsq99") pod "d56a0e01-8f6b-409a-96c1-5b67f7fc3528" (UID: "d56a0e01-8f6b-409a-96c1-5b67f7fc3528"). InnerVolumeSpecName "kube-api-access-dsq99". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.741792 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-config-data" (OuterVolumeSpecName: "config-data") pod "d56a0e01-8f6b-409a-96c1-5b67f7fc3528" (UID: "d56a0e01-8f6b-409a-96c1-5b67f7fc3528"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.741844 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d56a0e01-8f6b-409a-96c1-5b67f7fc3528" (UID: "d56a0e01-8f6b-409a-96c1-5b67f7fc3528"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.778136 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" path="/var/lib/kubelet/pods/2ec58fad-2ebd-4536-ae98-1f4537cbbffd/volumes"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.827975 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.828026 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-scripts\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.828038 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.828052 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsq99\" (UniqueName: \"kubernetes.io/projected/d56a0e01-8f6b-409a-96c1-5b67f7fc3528-kube-api-access-dsq99\") on node \"crc\" DevicePath \"\""
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.914170 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-ktcxd" event={"ID":"5ccc4c22-9c49-461c-9c53-404bdc932adc","Type":"ContainerDied","Data":"6d5aaa98c298ecaafba1b88ab7567a9beb808fd9b5d45674228c782698b30465"}
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.914215 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d5aaa98c298ecaafba1b88ab7567a9beb808fd9b5d45674228c782698b30465"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.914272 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-ktcxd"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.926815 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-djdrn" event={"ID":"d56a0e01-8f6b-409a-96c1-5b67f7fc3528","Type":"ContainerDied","Data":"3737a2d57cc98db58cfaaa3d7efc80908da3e1f17534d8cab821ddf3c7460e06"}
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.926860 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3737a2d57cc98db58cfaaa3d7efc80908da3e1f17534d8cab821ddf3c7460e06"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.926941 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-djdrn"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.998838 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 02 17:06:05 crc kubenswrapper[4747]: E1202 17:06:05.999456 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d56a0e01-8f6b-409a-96c1-5b67f7fc3528" containerName="nova-manage"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.999492 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d56a0e01-8f6b-409a-96c1-5b67f7fc3528" containerName="nova-manage"
Dec 02 17:06:05 crc kubenswrapper[4747]: E1202 17:06:05.999554 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerName="init"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.999562 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerName="init"
Dec 02 17:06:05 crc kubenswrapper[4747]: E1202 17:06:05.999572 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerName="dnsmasq-dns"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.999579 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerName="dnsmasq-dns"
Dec 02 17:06:05 crc kubenswrapper[4747]: E1202 17:06:05.999591 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ccc4c22-9c49-461c-9c53-404bdc932adc" containerName="nova-cell1-conductor-db-sync"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.999600 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ccc4c22-9c49-461c-9c53-404bdc932adc" containerName="nova-cell1-conductor-db-sync"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.999803 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ccc4c22-9c49-461c-9c53-404bdc932adc" containerName="nova-cell1-conductor-db-sync"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.999829 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d56a0e01-8f6b-409a-96c1-5b67f7fc3528" containerName="nova-manage"
Dec 02 17:06:05 crc kubenswrapper[4747]: I1202 17:06:05.999852 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ec58fad-2ebd-4536-ae98-1f4537cbbffd" containerName="dnsmasq-dns"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.000762 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.011178 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.033235 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lzhg\" (UniqueName: \"kubernetes.io/projected/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-kube-api-access-8lzhg\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.036408 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.036495 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.067071 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.139321 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lzhg\" (UniqueName: \"kubernetes.io/projected/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-kube-api-access-8lzhg\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.139406 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.139431 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.144950 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.161049 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.173746 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lzhg\" (UniqueName: \"kubernetes.io/projected/3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e-kube-api-access-8lzhg\") pod \"nova-cell1-conductor-0\" (UID: \"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e\") " pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.194480 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.194847 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-log" containerID="cri-o://269746c0e6d1c3d8b798c845af304a0893438ed83de31d144cc9c085ccea8f6c" gracePeriod=30
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.195435 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-api" containerID="cri-o://5262e4c331bf817e286e4b4699fdab1d653099813f8a1ff6148420e501ddd455" gracePeriod=30
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.231110 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.231419 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" containerName="nova-scheduler-scheduler" containerID="cri-o://18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" gracePeriod=30
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.319692 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.598678 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.599310 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.601217 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-log" containerID="cri-o://6df08aa63f0cf42e0f351792b36f9c1daf0e9a50b602243490f4cb7973b89c11" gracePeriod=30
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.601422 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-metadata" containerID="cri-o://cc5ec97ccae82f79c124b56e826bfa1a7c5ca695d4f883851d5ff76aa4e538b0" gracePeriod=30
Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.870171 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 02 17:06:06 crc kubenswrapper[4747]: W1202 17:06:06.878758 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a349a37_9c2c_4397_8fb3_ba6cdf8c5e2e.slice/crio-73a680dd2eacbab13d5d5ca5432dad6610579a2d1486f817f83f6d8084c4cbdf WatchSource:0}: Error finding container 73a680dd2eacbab13d5d5ca5432dad6610579a2d1486f817f83f6d8084c4cbdf: Status 404 returned error can't find the container with id 
73a680dd2eacbab13d5d5ca5432dad6610579a2d1486f817f83f6d8084c4cbdf Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.967127 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e","Type":"ContainerStarted","Data":"73a680dd2eacbab13d5d5ca5432dad6610579a2d1486f817f83f6d8084c4cbdf"} Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.978826 4747 generic.go:334] "Generic (PLEG): container finished" podID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerID="cc5ec97ccae82f79c124b56e826bfa1a7c5ca695d4f883851d5ff76aa4e538b0" exitCode=0 Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.978868 4747 generic.go:334] "Generic (PLEG): container finished" podID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerID="6df08aa63f0cf42e0f351792b36f9c1daf0e9a50b602243490f4cb7973b89c11" exitCode=143 Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.979002 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e","Type":"ContainerDied","Data":"cc5ec97ccae82f79c124b56e826bfa1a7c5ca695d4f883851d5ff76aa4e538b0"} Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.979043 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e","Type":"ContainerDied","Data":"6df08aa63f0cf42e0f351792b36f9c1daf0e9a50b602243490f4cb7973b89c11"} Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.986706 4747 generic.go:334] "Generic (PLEG): container finished" podID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerID="269746c0e6d1c3d8b798c845af304a0893438ed83de31d144cc9c085ccea8f6c" exitCode=143 Dec 02 17:06:06 crc kubenswrapper[4747]: I1202 17:06:06.986787 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c922e9b-6e52-4b03-977a-224b26ddeee5","Type":"ContainerDied","Data":"269746c0e6d1c3d8b798c845af304a0893438ed83de31d144cc9c085ccea8f6c"} Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.818823 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.830088 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-combined-ca-bundle\") pod \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.830258 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zx52v\" (UniqueName: \"kubernetes.io/projected/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-kube-api-access-zx52v\") pod \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.830484 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-config-data\") pod \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.830593 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-logs\") pod \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.830726 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-nova-metadata-tls-certs\") pod \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\" (UID: \"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e\") " Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.831133 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-logs" (OuterVolumeSpecName: "logs") pod "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" (UID: "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.831648 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.841230 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-kube-api-access-zx52v" (OuterVolumeSpecName: "kube-api-access-zx52v") pod "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" (UID: "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e"). InnerVolumeSpecName "kube-api-access-zx52v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.876015 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-config-data" (OuterVolumeSpecName: "config-data") pod "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" (UID: "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.877296 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" (UID: "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.916924 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" (UID: "06ff6ee6-40c3-4d2c-8052-92ca21e9b87e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.933251 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zx52v\" (UniqueName: \"kubernetes.io/projected/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-kube-api-access-zx52v\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.933298 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.933312 4747 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:07 crc kubenswrapper[4747]: I1202 17:06:07.933323 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:07 crc kubenswrapper[4747]: E1202 17:06:07.983427 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 17:06:07 crc kubenswrapper[4747]: E1202 17:06:07.986191 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 17:06:07 crc kubenswrapper[4747]: E1202 17:06:07.988517 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 17:06:07 crc kubenswrapper[4747]: E1202 17:06:07.989575 4747 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" 
podUID="44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" containerName="nova-scheduler-scheduler" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.005543 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.005567 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"06ff6ee6-40c3-4d2c-8052-92ca21e9b87e","Type":"ContainerDied","Data":"c47dca3a462fa26f521cbdb3ebb9ba4a78611053741862b62fad5edff403fd7f"} Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.005655 4747 scope.go:117] "RemoveContainer" containerID="cc5ec97ccae82f79c124b56e826bfa1a7c5ca695d4f883851d5ff76aa4e538b0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.009984 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e","Type":"ContainerStarted","Data":"0b35f5378110500fea65083fd8aeaa4164d26365e42bac81b2232475f8ec5ce9"} Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.011809 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.035208 4747 scope.go:117] "RemoveContainer" containerID="6df08aa63f0cf42e0f351792b36f9c1daf0e9a50b602243490f4cb7973b89c11" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.042063 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.042027951 podStartE2EDuration="3.042027951s" podCreationTimestamp="2025-12-02 17:06:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:08.027687963 +0000 UTC m=+1398.554576732" watchObservedRunningTime="2025-12-02 17:06:08.042027951 +0000 UTC m=+1398.568916700" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.077679 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.096758 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.119091 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:08 crc kubenswrapper[4747]: E1202 17:06:08.119791 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-log" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.119812 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-log" Dec 02 17:06:08 crc kubenswrapper[4747]: E1202 17:06:08.119852 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-metadata" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.119861 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-metadata" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.120092 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-metadata" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.120122 4747 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" containerName="nova-metadata-log" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.121427 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.168430 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.168651 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.168784 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.172984 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbb7413-56c4-4018-9e53-584b138878c1-logs\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.173070 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-config-data\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.173303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjvqz\" (UniqueName: \"kubernetes.io/projected/6fbb7413-56c4-4018-9e53-584b138878c1-kube-api-access-rjvqz\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.173366 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.173447 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.275239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbb7413-56c4-4018-9e53-584b138878c1-logs\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.275300 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-config-data\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.275360 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjvqz\" (UniqueName: 
\"kubernetes.io/projected/6fbb7413-56c4-4018-9e53-584b138878c1-kube-api-access-rjvqz\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.275391 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.275432 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.276501 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbb7413-56c4-4018-9e53-584b138878c1-logs\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.279865 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.280766 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-config-data\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.280953 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.294735 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjvqz\" (UniqueName: \"kubernetes.io/projected/6fbb7413-56c4-4018-9e53-584b138878c1-kube-api-access-rjvqz\") pod \"nova-metadata-0\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.495970 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:08 crc kubenswrapper[4747]: W1202 17:06:08.773889 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fbb7413_56c4_4018_9e53_584b138878c1.slice/crio-d325b2d7262395b831274377b460615c540f2c7abde804ded79d475293c379cd WatchSource:0}: Error finding container d325b2d7262395b831274377b460615c540f2c7abde804ded79d475293c379cd: Status 404 returned error can't find the container with id d325b2d7262395b831274377b460615c540f2c7abde804ded79d475293c379cd Dec 02 17:06:08 crc kubenswrapper[4747]: I1202 17:06:08.776150 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:09 crc kubenswrapper[4747]: I1202 17:06:09.024640 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6fbb7413-56c4-4018-9e53-584b138878c1","Type":"ContainerStarted","Data":"bcb8882aab554a7e57c619592e3c367a37fcaa670315509ba4ac42a8b278505c"} Dec 02 17:06:09 crc kubenswrapper[4747]: I1202 17:06:09.027117 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6fbb7413-56c4-4018-9e53-584b138878c1","Type":"ContainerStarted","Data":"d325b2d7262395b831274377b460615c540f2c7abde804ded79d475293c379cd"} Dec 02 17:06:09 crc kubenswrapper[4747]: I1202 17:06:09.777352 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06ff6ee6-40c3-4d2c-8052-92ca21e9b87e" path="/var/lib/kubelet/pods/06ff6ee6-40c3-4d2c-8052-92ca21e9b87e/volumes" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.044464 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6fbb7413-56c4-4018-9e53-584b138878c1","Type":"ContainerStarted","Data":"58e1b0a64205d98eeebd6dbe46c64b37c085d5e5c2bc03f0d12a06772bca7d73"} Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.082197 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.08217595 podStartE2EDuration="2.08217595s" podCreationTimestamp="2025-12-02 17:06:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:10.081896662 +0000 UTC m=+1400.608785421" watchObservedRunningTime="2025-12-02 17:06:10.08217595 +0000 UTC m=+1400.609064699" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.512828 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.540157 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8r57\" (UniqueName: \"kubernetes.io/projected/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-kube-api-access-w8r57\") pod \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.540336 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-combined-ca-bundle\") pod \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.540363 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-config-data\") pod \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\" (UID: \"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36\") " Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.550074 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-kube-api-access-w8r57" (OuterVolumeSpecName: "kube-api-access-w8r57") pod "44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" (UID: "44e2aaf8-b5e0-4d63-95b1-8aac22e19a36"). InnerVolumeSpecName "kube-api-access-w8r57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.586195 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" (UID: "44e2aaf8-b5e0-4d63-95b1-8aac22e19a36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.589019 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-config-data" (OuterVolumeSpecName: "config-data") pod "44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" (UID: "44e2aaf8-b5e0-4d63-95b1-8aac22e19a36"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.644174 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.644221 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:10 crc kubenswrapper[4747]: I1202 17:06:10.644236 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8r57\" (UniqueName: \"kubernetes.io/projected/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36-kube-api-access-w8r57\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.065053 4747 generic.go:334] "Generic (PLEG): container finished" podID="44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" containerID="18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" exitCode=0 Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.065529 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.065651 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36","Type":"ContainerDied","Data":"18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b"} Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.065707 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"44e2aaf8-b5e0-4d63-95b1-8aac22e19a36","Type":"ContainerDied","Data":"11290fd14adadb225f4eb2dc42731e096b25befada6f9bee67fce4892f31b12e"} Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.065726 4747 scope.go:117] "RemoveContainer" containerID="18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.073612 4747 generic.go:334] "Generic (PLEG): container finished" podID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerID="5262e4c331bf817e286e4b4699fdab1d653099813f8a1ff6148420e501ddd455" exitCode=0 Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.074978 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c922e9b-6e52-4b03-977a-224b26ddeee5","Type":"ContainerDied","Data":"5262e4c331bf817e286e4b4699fdab1d653099813f8a1ff6148420e501ddd455"} Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.113183 4747 scope.go:117] "RemoveContainer" containerID="18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" Dec 02 17:06:11 crc kubenswrapper[4747]: E1202 17:06:11.114027 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b\": container with ID starting with 18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b not found: ID does not exist" containerID="18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.114080 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b"} err="failed to get container status 
\"18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b\": rpc error: code = NotFound desc = could not find container \"18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b\": container with ID starting with 18c998486a7115ff63a08200494066db9081edf57764513a9d97024808463b3b not found: ID does not exist" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.118763 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.127806 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.158272 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:11 crc kubenswrapper[4747]: E1202 17:06:11.158815 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" containerName="nova-scheduler-scheduler" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.158838 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" containerName="nova-scheduler-scheduler" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.159137 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" containerName="nova-scheduler-scheduler" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.159974 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.167420 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.182752 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.258207 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.259530 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.259693 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-config-data\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.259852 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94hfg\" (UniqueName: \"kubernetes.io/projected/ae6eb70b-b206-4849-8120-bc08f4318de1-kube-api-access-94hfg\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.361768 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-combined-ca-bundle\") pod \"4c922e9b-6e52-4b03-977a-224b26ddeee5\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.362269 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-config-data\") pod \"4c922e9b-6e52-4b03-977a-224b26ddeee5\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.362523 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-774rp\" (UniqueName: \"kubernetes.io/projected/4c922e9b-6e52-4b03-977a-224b26ddeee5-kube-api-access-774rp\") pod \"4c922e9b-6e52-4b03-977a-224b26ddeee5\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.362687 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c922e9b-6e52-4b03-977a-224b26ddeee5-logs\") pod \"4c922e9b-6e52-4b03-977a-224b26ddeee5\" (UID: \"4c922e9b-6e52-4b03-977a-224b26ddeee5\") " Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.363045 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c922e9b-6e52-4b03-977a-224b26ddeee5-logs" (OuterVolumeSpecName: "logs") pod "4c922e9b-6e52-4b03-977a-224b26ddeee5" (UID: "4c922e9b-6e52-4b03-977a-224b26ddeee5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.363380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.363511 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-config-data\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.363621 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94hfg\" (UniqueName: \"kubernetes.io/projected/ae6eb70b-b206-4849-8120-bc08f4318de1-kube-api-access-94hfg\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.363769 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c922e9b-6e52-4b03-977a-224b26ddeee5-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.370240 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c922e9b-6e52-4b03-977a-224b26ddeee5-kube-api-access-774rp" (OuterVolumeSpecName: "kube-api-access-774rp") pod "4c922e9b-6e52-4b03-977a-224b26ddeee5" (UID: "4c922e9b-6e52-4b03-977a-224b26ddeee5"). InnerVolumeSpecName "kube-api-access-774rp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.373261 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-config-data\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.374779 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.384124 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94hfg\" (UniqueName: \"kubernetes.io/projected/ae6eb70b-b206-4849-8120-bc08f4318de1-kube-api-access-94hfg\") pod \"nova-scheduler-0\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.397270 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-config-data" (OuterVolumeSpecName: "config-data") pod "4c922e9b-6e52-4b03-977a-224b26ddeee5" (UID: "4c922e9b-6e52-4b03-977a-224b26ddeee5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.404318 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c922e9b-6e52-4b03-977a-224b26ddeee5" (UID: "4c922e9b-6e52-4b03-977a-224b26ddeee5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.470176 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.470483 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c922e9b-6e52-4b03-977a-224b26ddeee5-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.470548 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-774rp\" (UniqueName: \"kubernetes.io/projected/4c922e9b-6e52-4b03-977a-224b26ddeee5-kube-api-access-774rp\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.582409 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.677694 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.678025 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="fc2871c5-3506-4ae4-9881-8c184d57c2e1" containerName="kube-state-metrics" containerID="cri-o://c05c72e5c32d5b462b338877cf90a47a6481775e0f0255c1117409d13c816105" gracePeriod=30 Dec 02 17:06:11 crc kubenswrapper[4747]: I1202 17:06:11.775086 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44e2aaf8-b5e0-4d63-95b1-8aac22e19a36" path="/var/lib/kubelet/pods/44e2aaf8-b5e0-4d63-95b1-8aac22e19a36/volumes" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.137060 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4c922e9b-6e52-4b03-977a-224b26ddeee5","Type":"ContainerDied","Data":"c9c8f8aea5f31590cb697174cd1b7f61a8727b417af726e03456bd1feffbef48"} Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.137489 4747 scope.go:117] "RemoveContainer" containerID="5262e4c331bf817e286e4b4699fdab1d653099813f8a1ff6148420e501ddd455" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.137884 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.154309 4747 generic.go:334] "Generic (PLEG): container finished" podID="fc2871c5-3506-4ae4-9881-8c184d57c2e1" containerID="c05c72e5c32d5b462b338877cf90a47a6481775e0f0255c1117409d13c816105" exitCode=2 Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.154364 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fc2871c5-3506-4ae4-9881-8c184d57c2e1","Type":"ContainerDied","Data":"c05c72e5c32d5b462b338877cf90a47a6481775e0f0255c1117409d13c816105"} Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.173504 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.312352 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.312942 4747 scope.go:117] "RemoveContainer" containerID="269746c0e6d1c3d8b798c845af304a0893438ed83de31d144cc9c085ccea8f6c" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.349213 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.360204 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.372091 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:12 crc kubenswrapper[4747]: E1202 17:06:12.372974 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-log" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.373069 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-log" Dec 02 17:06:12 crc kubenswrapper[4747]: E1202 17:06:12.373214 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc2871c5-3506-4ae4-9881-8c184d57c2e1" containerName="kube-state-metrics" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.373295 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc2871c5-3506-4ae4-9881-8c184d57c2e1" containerName="kube-state-metrics" Dec 02 17:06:12 crc kubenswrapper[4747]: E1202 17:06:12.373384 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-api" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.373454 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-api" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.373761 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc2871c5-3506-4ae4-9881-8c184d57c2e1" containerName="kube-state-metrics" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.373864 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-log" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.374013 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" containerName="nova-api-api" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.375660 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.384563 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.406676 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bd6s\" (UniqueName: \"kubernetes.io/projected/fc2871c5-3506-4ae4-9881-8c184d57c2e1-kube-api-access-6bd6s\") pod \"fc2871c5-3506-4ae4-9881-8c184d57c2e1\" (UID: \"fc2871c5-3506-4ae4-9881-8c184d57c2e1\") " Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.408024 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.408266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcp9j\" (UniqueName: \"kubernetes.io/projected/f844f614-cade-44e0-af45-c7f1f3df847c-kube-api-access-bcp9j\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.408396 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f844f614-cade-44e0-af45-c7f1f3df847c-logs\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.408707 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-config-data\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.412297 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.418115 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc2871c5-3506-4ae4-9881-8c184d57c2e1-kube-api-access-6bd6s" (OuterVolumeSpecName: "kube-api-access-6bd6s") pod "fc2871c5-3506-4ae4-9881-8c184d57c2e1" (UID: "fc2871c5-3506-4ae4-9881-8c184d57c2e1"). InnerVolumeSpecName "kube-api-access-6bd6s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.510571 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.511262 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcp9j\" (UniqueName: \"kubernetes.io/projected/f844f614-cade-44e0-af45-c7f1f3df847c-kube-api-access-bcp9j\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.511347 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f844f614-cade-44e0-af45-c7f1f3df847c-logs\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.511547 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-config-data\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.511689 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bd6s\" (UniqueName: \"kubernetes.io/projected/fc2871c5-3506-4ae4-9881-8c184d57c2e1-kube-api-access-6bd6s\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.511760 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f844f614-cade-44e0-af45-c7f1f3df847c-logs\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.517729 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.518198 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-config-data\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.530455 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcp9j\" (UniqueName: \"kubernetes.io/projected/f844f614-cade-44e0-af45-c7f1f3df847c-kube-api-access-bcp9j\") pod \"nova-api-0\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " pod="openstack/nova-api-0" Dec 02 17:06:12 crc kubenswrapper[4747]: I1202 17:06:12.721486 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.215136 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fc2871c5-3506-4ae4-9881-8c184d57c2e1","Type":"ContainerDied","Data":"178f8f0688b5065ec91692235aef484a5b61d7b0be13d0ee4f19cca388410455"} Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.215528 4747 scope.go:117] "RemoveContainer" containerID="c05c72e5c32d5b462b338877cf90a47a6481775e0f0255c1117409d13c816105" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.215638 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.222871 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ae6eb70b-b206-4849-8120-bc08f4318de1","Type":"ContainerStarted","Data":"2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2"} Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.223022 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ae6eb70b-b206-4849-8120-bc08f4318de1","Type":"ContainerStarted","Data":"e40ba6bc4055bb4e47f2b446fe43ceb9533858766b7c3cfef7a85961a61b562b"} Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.269963 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.282413 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.28239224 podStartE2EDuration="2.28239224s" podCreationTimestamp="2025-12-02 17:06:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:13.24513945 +0000 UTC m=+1403.772028199" watchObservedRunningTime="2025-12-02 17:06:13.28239224 +0000 UTC m=+1403.809280979" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.302085 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.313069 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.324810 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.326120 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.328716 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.329294 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.340247 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.431965 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2985\" (UniqueName: \"kubernetes.io/projected/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-api-access-g2985\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.432058 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.432088 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.432127 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.496193 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.496282 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.535362 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2985\" (UniqueName: \"kubernetes.io/projected/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-api-access-g2985\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.535565 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.535613 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.535668 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.543161 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.543551 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.544558 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7425e95-34bc-464a-8d30-74ec67cd1760-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.561304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2985\" (UniqueName: \"kubernetes.io/projected/f7425e95-34bc-464a-8d30-74ec67cd1760-kube-api-access-g2985\") pod \"kube-state-metrics-0\" (UID: \"f7425e95-34bc-464a-8d30-74ec67cd1760\") " pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.718025 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.774200 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c922e9b-6e52-4b03-977a-224b26ddeee5" path="/var/lib/kubelet/pods/4c922e9b-6e52-4b03-977a-224b26ddeee5/volumes" Dec 02 17:06:13 crc kubenswrapper[4747]: I1202 17:06:13.774940 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc2871c5-3506-4ae4-9881-8c184d57c2e1" path="/var/lib/kubelet/pods/fc2871c5-3506-4ae4-9881-8c184d57c2e1/volumes" Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.003319 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.003711 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-central-agent" containerID="cri-o://6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988" gracePeriod=30 Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.004401 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="proxy-httpd" containerID="cri-o://c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67" gracePeriod=30 Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.004496 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-notification-agent" containerID="cri-o://3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7" gracePeriod=30 Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.004579 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="sg-core" containerID="cri-o://9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece" gracePeriod=30 Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.301821 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.354993 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f844f614-cade-44e0-af45-c7f1f3df847c","Type":"ContainerStarted","Data":"3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355"} Dec 02 17:06:14 crc kubenswrapper[4747]: I1202 17:06:14.355051 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f844f614-cade-44e0-af45-c7f1f3df847c","Type":"ContainerStarted","Data":"883eba882453479ae58febf015fe5f63def6e1a659415f5376ef769101a560ee"} Dec 02 17:06:15 crc kubenswrapper[4747]: E1202 17:06:15.232292 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d778f77_b54a_4ef2_b446_baa6ec3147ca.slice/crio-6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d778f77_b54a_4ef2_b446_baa6ec3147ca.slice/crio-conmon-6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988.scope\": RecentStats: unable to find data in memory cache]" Dec 02 17:06:15 crc 
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.368377 4747 generic.go:334] "Generic (PLEG): container finished" podID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerID="9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece" exitCode=2
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.368393 4747 generic.go:334] "Generic (PLEG): container finished" podID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerID="6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988" exitCode=0
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.368088 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerDied","Data":"c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67"}
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.368497 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerDied","Data":"9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece"}
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.368521 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerDied","Data":"6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988"}
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.371651 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f7425e95-34bc-464a-8d30-74ec67cd1760","Type":"ContainerStarted","Data":"35120f63bb4fa5c39ba147074d5103a4f9abe28740456dd1a5205aae8138b784"}
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.371682 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f7425e95-34bc-464a-8d30-74ec67cd1760","Type":"ContainerStarted","Data":"12462f9c87e634dc58d9c51d15b0fa0df339eaabee620f5341fb5901534f0ea9"}
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.371780 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.375081 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f844f614-cade-44e0-af45-c7f1f3df847c","Type":"ContainerStarted","Data":"c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e"}
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.406476 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.926597747 podStartE2EDuration="2.40644909s" podCreationTimestamp="2025-12-02 17:06:13 +0000 UTC" firstStartedPulling="2025-12-02 17:06:14.493276729 +0000 UTC m=+1405.020165478" lastFinishedPulling="2025-12-02 17:06:14.973128072 +0000 UTC m=+1405.500016821" observedRunningTime="2025-12-02 17:06:15.393849221 +0000 UTC m=+1405.920737980" watchObservedRunningTime="2025-12-02 17:06:15.40644909 +0000 UTC m=+1405.933337839"
Dec 02 17:06:15 crc kubenswrapper[4747]: I1202 17:06:15.435570 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.435539128 
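podStartE2EDuration="3.435539128s" podCreationTimestamp="2025-12-02 17:06:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:15.417632258 +0000 UTC m=+1405.944521007" watchObservedRunningTime="2025-12-02 17:06:15.435539128 +0000 UTC m=+1405.962427877"

The startup-latency entries decompose as: podStartE2EDuration = observedRunningTime minus podCreationTimestamp, and podStartSLOduration = E2E minus the image-pull window (lastFinishedPulling minus firstStartedPulling). For kube-state-metrics-0 above that is 2.40644909s minus 0.479851343s = 1.926597747s; pods whose pull stamps are the zero time "0001-01-01 ..." pulled nothing, so SLO equals E2E (nova-api-0, nova-scheduler-0). A small sketch that re-derives the number (file name and layout constant are assumptions, not tracker code):

```go
// sloduration.go: reproduces the podStartSLOduration arithmetic from the
// kube-state-metrics-0 entry above (a sketch of the bookkeeping, not the
// pod_startup_latency_tracker implementation).
package main

import (
	"fmt"
	"time"
)

// Matches the timestamp format printed in the log.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-12-02 17:06:13 +0000 UTC")
	firstStartedPulling := mustParse("2025-12-02 17:06:14.493276729 +0000 UTC")
	lastFinishedPulling := mustParse("2025-12-02 17:06:14.973128072 +0000 UTC")
	observedRunning := mustParse("2025-12-02 17:06:15.40644909 +0000 UTC")

	e2e := observedRunning.Sub(created)                  // 2.40644909s
	pull := lastFinishedPulling.Sub(firstStartedPulling) // 479.851343ms
	fmt.Println("podStartE2EDuration:", e2e)
	fmt.Println("podStartSLOduration:", e2e-pull) // 1.926597747s
}
```
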
podStartE2EDuration="3.435539128s" podCreationTimestamp="2025-12-02 17:06:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:15.417632258 +0000 UTC m=+1405.944521007" watchObservedRunningTime="2025-12-02 17:06:15.435539128 +0000 UTC m=+1405.962427877" Dec 02 17:06:16 crc kubenswrapper[4747]: I1202 17:06:16.358537 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 02 17:06:16 crc kubenswrapper[4747]: I1202 17:06:16.583136 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.071287 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.166721 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-combined-ca-bundle\") pod \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.167098 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-scripts\") pod \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.167269 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-config-data\") pod \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.167418 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-sg-core-conf-yaml\") pod \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.167712 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-run-httpd\") pod \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.167849 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-log-httpd\") pod \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.168051 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6d778f77-b54a-4ef2-b446-baa6ec3147ca" (UID: "6d778f77-b54a-4ef2-b446-baa6ec3147ca"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.168434 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6d778f77-b54a-4ef2-b446-baa6ec3147ca" (UID: "6d778f77-b54a-4ef2-b446-baa6ec3147ca"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.168769 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvzms\" (UniqueName: \"kubernetes.io/projected/6d778f77-b54a-4ef2-b446-baa6ec3147ca-kube-api-access-dvzms\") pod \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\" (UID: \"6d778f77-b54a-4ef2-b446-baa6ec3147ca\") " Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.169995 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.170143 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6d778f77-b54a-4ef2-b446-baa6ec3147ca-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.176682 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d778f77-b54a-4ef2-b446-baa6ec3147ca-kube-api-access-dvzms" (OuterVolumeSpecName: "kube-api-access-dvzms") pod "6d778f77-b54a-4ef2-b446-baa6ec3147ca" (UID: "6d778f77-b54a-4ef2-b446-baa6ec3147ca"). InnerVolumeSpecName "kube-api-access-dvzms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.205787 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-scripts" (OuterVolumeSpecName: "scripts") pod "6d778f77-b54a-4ef2-b446-baa6ec3147ca" (UID: "6d778f77-b54a-4ef2-b446-baa6ec3147ca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.213738 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6d778f77-b54a-4ef2-b446-baa6ec3147ca" (UID: "6d778f77-b54a-4ef2-b446-baa6ec3147ca"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.257325 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d778f77-b54a-4ef2-b446-baa6ec3147ca" (UID: "6d778f77-b54a-4ef2-b446-baa6ec3147ca"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.272442 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.272475 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvzms\" (UniqueName: \"kubernetes.io/projected/6d778f77-b54a-4ef2-b446-baa6ec3147ca-kube-api-access-dvzms\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.272485 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.272494 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.289471 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-config-data" (OuterVolumeSpecName: "config-data") pod "6d778f77-b54a-4ef2-b446-baa6ec3147ca" (UID: "6d778f77-b54a-4ef2-b446-baa6ec3147ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.374523 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d778f77-b54a-4ef2-b446-baa6ec3147ca-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.433845 4747 generic.go:334] "Generic (PLEG): container finished" podID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerID="3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7" exitCode=0 Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.433921 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerDied","Data":"3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7"} Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.433961 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6d778f77-b54a-4ef2-b446-baa6ec3147ca","Type":"ContainerDied","Data":"ece620da383ffc4bf68703c60a3c11d19948d5091b839173c23c0d6dffa18f38"} Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.433983 4747 scope.go:117] "RemoveContainer" containerID="c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.434149 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.467311 4747 scope.go:117] "RemoveContainer" containerID="9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.473897 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.489967 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.503810 4747 scope.go:117] "RemoveContainer" containerID="3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.509682 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:17 crc kubenswrapper[4747]: E1202 17:06:17.510502 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="sg-core" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510527 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="sg-core" Dec 02 17:06:17 crc kubenswrapper[4747]: E1202 17:06:17.510544 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-notification-agent" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510552 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-notification-agent" Dec 02 17:06:17 crc kubenswrapper[4747]: E1202 17:06:17.510588 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="proxy-httpd" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510596 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="proxy-httpd" Dec 02 17:06:17 crc kubenswrapper[4747]: E1202 17:06:17.510622 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-central-agent" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510629 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-central-agent" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510877 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-central-agent" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510893 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="sg-core" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510939 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="proxy-httpd" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.510955 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" containerName="ceilometer-notification-agent" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.513601 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.518757 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.519162 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.519320 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.525446 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.552080 4747 scope.go:117] "RemoveContainer" containerID="6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.581741 4747 scope.go:117] "RemoveContainer" containerID="c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67" Dec 02 17:06:17 crc kubenswrapper[4747]: E1202 17:06:17.582451 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67\": container with ID starting with c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67 not found: ID does not exist" containerID="c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.583021 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67"} err="failed to get container status \"c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67\": rpc error: code = NotFound desc = could not find container \"c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67\": container with ID starting with c6095ce977dbcdf71c8c810fce98ffca6a3f85d9bbe696ba5b013fd041e8fa67 not found: ID does not exist" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.583062 4747 scope.go:117] "RemoveContainer" containerID="9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece" Dec 02 17:06:17 crc kubenswrapper[4747]: E1202 17:06:17.586442 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece\": container with ID starting with 9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece not found: ID does not exist" containerID="9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.586577 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece"} err="failed to get container status \"9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece\": rpc error: code = NotFound desc = could not find container \"9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece\": container with ID starting with 9eb04a61eccf9471f81811ad4f981829796d47352cb7e867291f0be2a783aece not found: ID does not exist" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.586682 4747 scope.go:117] "RemoveContainer" containerID="3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7" Dec 02 17:06:17 
crc kubenswrapper[4747]: E1202 17:06:17.587303 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7\": container with ID starting with 3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7 not found: ID does not exist" containerID="3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.587373 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7"} err="failed to get container status \"3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7\": rpc error: code = NotFound desc = could not find container \"3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7\": container with ID starting with 3f3f664e9e1227bdcb6ca25ec3a30f39d94ca5bed8b9b97c1ee22266f204e5a7 not found: ID does not exist" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.587418 4747 scope.go:117] "RemoveContainer" containerID="6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988" Dec 02 17:06:17 crc kubenswrapper[4747]: E1202 17:06:17.588769 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988\": container with ID starting with 6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988 not found: ID does not exist" containerID="6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.588864 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988"} err="failed to get container status \"6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988\": rpc error: code = NotFound desc = could not find container \"6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988\": container with ID starting with 6d96f62471ac1a7d4c748c425c7393cb211974c84ab4a4946368583fa3cf4988 not found: ID does not exist" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.681947 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.682022 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-log-httpd\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.682072 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.682203 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
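started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-config-data\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0"

The four "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above are a benign race rather than a real failure: the containers were already gone by the time the follow-up status query ran. Clients of container runtimes conventionally treat NotFound on delete as success so the operation stays idempotent. A stdlib-only sketch of that pattern (the error sentinel and fakeRuntime are stand-ins, not the real CRI client):

```go
// idempotentdelete.go: sketch of treating "already deleted" as success so
// container removal can be retried safely.
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("no such container") // stand-in for gRPC NotFound

// fakeRuntime pretends the container disappears after the first removal.
type fakeRuntime struct{ gone map[string]bool }

func (r *fakeRuntime) Remove(id string) error {
	if r.gone[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	r.gone[id] = true
	return nil
}

// removeContainer maps NotFound onto success: the desired state (container
// absent) already holds, so there is nothing left to do.
func removeContainer(r *fakeRuntime, id string) error {
	if err := r.Remove(id); err != nil && !errors.Is(err, errNotFound) {
		return err
	}
	return nil
}

func main() {
	rt := &fakeRuntime{gone: map[string]bool{}}
	id := "c6095ce977db" // truncated for readability
	fmt.Println(removeContainer(rt, id)) // <nil>: real removal
	fmt.Println(removeContainer(rt, id)) // <nil>: NotFound swallowed
}
```
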
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-config-data\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.682238 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svz7k\" (UniqueName: \"kubernetes.io/projected/e1e77c54-c815-4816-8e92-1af63903dfe2-kube-api-access-svz7k\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.682266 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-run-httpd\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.682293 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.682321 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-scripts\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.778281 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d778f77-b54a-4ef2-b446-baa6ec3147ca" path="/var/lib/kubelet/pods/6d778f77-b54a-4ef2-b446-baa6ec3147ca/volumes" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.807941 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.808235 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-config-data\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.808370 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svz7k\" (UniqueName: \"kubernetes.io/projected/e1e77c54-c815-4816-8e92-1af63903dfe2-kube-api-access-svz7k\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.808475 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-run-httpd\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.808567 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.808676 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-scripts\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.809675 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-run-httpd\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.811203 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.811298 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-log-httpd\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.812122 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-log-httpd\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.815183 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.816287 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.820641 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.835149 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svz7k\" (UniqueName: \"kubernetes.io/projected/e1e77c54-c815-4816-8e92-1af63903dfe2-kube-api-access-svz7k\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.839856 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-config-data\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.840542 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-scripts\") pod \"ceilometer-0\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " pod="openstack/ceilometer-0" Dec 02 17:06:17 crc kubenswrapper[4747]: I1202 17:06:17.863734 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:18 crc kubenswrapper[4747]: I1202 17:06:18.400671 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:18 crc kubenswrapper[4747]: I1202 17:06:18.455077 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerStarted","Data":"2e00ee83d882a0d5c3230b16f0ff48ddb2065cbee8faeae94d084747c71cc2aa"} Dec 02 17:06:18 crc kubenswrapper[4747]: I1202 17:06:18.496630 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 17:06:18 crc kubenswrapper[4747]: I1202 17:06:18.496692 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 17:06:19 crc kubenswrapper[4747]: I1202 17:06:19.473315 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerStarted","Data":"f154278e68c0d8865f4975fe7f10168f6735572928c093004a4a3c97b9b49a73"} Dec 02 17:06:19 crc kubenswrapper[4747]: I1202 17:06:19.511170 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 17:06:19 crc kubenswrapper[4747]: I1202 17:06:19.511230 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 17:06:20 crc kubenswrapper[4747]: I1202 17:06:20.486854 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerStarted","Data":"25701f487f89302eda33a38baa35fdf4901707d3355bf5e9db0c0f7e9339a0cb"} Dec 02 17:06:21 crc kubenswrapper[4747]: I1202 17:06:21.501155 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerStarted","Data":"220a3f132c69cd51368cc8c23d49216fbebcace9730e004933cb0d48fdc6d2aa"} Dec 02 17:06:21 crc kubenswrapper[4747]: I1202 17:06:21.583025 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 17:06:21 crc kubenswrapper[4747]: I1202 17:06:21.626946 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 17:06:22 crc kubenswrapper[4747]: I1202 17:06:22.547513 4747 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 17:06:22 crc kubenswrapper[4747]: I1202 17:06:22.722246 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 17:06:22 crc kubenswrapper[4747]: I1202 17:06:22.723651 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 17:06:23 crc kubenswrapper[4747]: I1202 17:06:23.806266 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 17:06:23 crc kubenswrapper[4747]: I1202 17:06:23.806367 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 17:06:23 crc kubenswrapper[4747]: I1202 17:06:23.811775 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 02 17:06:26 crc kubenswrapper[4747]: I1202 17:06:26.564795 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerStarted","Data":"2febcd4dd0100d63e9405fc9cd506555474c1ce45eb85a74c10ed188b1d706fb"} Dec 02 17:06:26 crc kubenswrapper[4747]: I1202 17:06:26.565410 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:06:26 crc kubenswrapper[4747]: I1202 17:06:26.594334 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.461121039 podStartE2EDuration="9.594311134s" podCreationTimestamp="2025-12-02 17:06:17 +0000 UTC" firstStartedPulling="2025-12-02 17:06:18.404972557 +0000 UTC m=+1408.931861306" lastFinishedPulling="2025-12-02 17:06:25.538162652 +0000 UTC m=+1416.065051401" observedRunningTime="2025-12-02 17:06:26.591951867 +0000 UTC m=+1417.118840616" watchObservedRunningTime="2025-12-02 17:06:26.594311134 +0000 UTC m=+1417.121199883" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.357607 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.457406 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-config-data\") pod \"3760cc83-1810-4268-a539-4d1bc6bd358c\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.457506 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdjxd\" (UniqueName: \"kubernetes.io/projected/3760cc83-1810-4268-a539-4d1bc6bd358c-kube-api-access-fdjxd\") pod \"3760cc83-1810-4268-a539-4d1bc6bd358c\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.457542 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-combined-ca-bundle\") pod \"3760cc83-1810-4268-a539-4d1bc6bd358c\" (UID: \"3760cc83-1810-4268-a539-4d1bc6bd358c\") " Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.467041 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3760cc83-1810-4268-a539-4d1bc6bd358c-kube-api-access-fdjxd" (OuterVolumeSpecName: "kube-api-access-fdjxd") pod "3760cc83-1810-4268-a539-4d1bc6bd358c" (UID: "3760cc83-1810-4268-a539-4d1bc6bd358c"). InnerVolumeSpecName "kube-api-access-fdjxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.493133 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-config-data" (OuterVolumeSpecName: "config-data") pod "3760cc83-1810-4268-a539-4d1bc6bd358c" (UID: "3760cc83-1810-4268-a539-4d1bc6bd358c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.502025 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3760cc83-1810-4268-a539-4d1bc6bd358c" (UID: "3760cc83-1810-4268-a539-4d1bc6bd358c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.503378 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.504107 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.510470 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.561414 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.561455 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdjxd\" (UniqueName: \"kubernetes.io/projected/3760cc83-1810-4268-a539-4d1bc6bd358c-kube-api-access-fdjxd\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.561467 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3760cc83-1810-4268-a539-4d1bc6bd358c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.588316 4747 generic.go:334] "Generic (PLEG): container finished" podID="3760cc83-1810-4268-a539-4d1bc6bd358c" containerID="153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd" exitCode=137 Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.589036 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.589376 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3760cc83-1810-4268-a539-4d1bc6bd358c","Type":"ContainerDied","Data":"153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd"} Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.589477 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3760cc83-1810-4268-a539-4d1bc6bd358c","Type":"ContainerDied","Data":"3668a91d7ec67a677dfa904323b19450cadfcd22d3eb3fa774df614ea71fb750"} Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.589505 4747 scope.go:117] "RemoveContainer" containerID="153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.595614 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.652287 4747 scope.go:117] "RemoveContainer" containerID="153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd" Dec 02 17:06:28 crc kubenswrapper[4747]: E1202 17:06:28.653107 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd\": container with ID starting with 153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd not found: ID does not exist" containerID="153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.653188 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd"} err="failed to get container status \"153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd\": rpc error: code = NotFound desc = could not find container \"153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd\": container with ID starting with 153154f9572549e4e335fc81d1b6948fea46c6b5395904d91d183af900f673bd not found: ID does not exist" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.659969 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.679880 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.702551 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 17:06:28 crc kubenswrapper[4747]: E1202 17:06:28.703196 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3760cc83-1810-4268-a539-4d1bc6bd358c" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.703220 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="3760cc83-1810-4268-a539-4d1bc6bd358c" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.703416 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="3760cc83-1810-4268-a539-4d1bc6bd358c" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.704308 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.708030 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.708079 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.708366 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.717022 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.872478 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.872547 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.872583 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-combined-ca-bundle\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.872698 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.872777 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktgv8\" (UniqueName: \"kubernetes.io/projected/49fa0781-199d-40a5-958f-591c534f25cc-kube-api-access-ktgv8\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.974570 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.974650 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.974688 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.974759 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.974832 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktgv8\" (UniqueName: \"kubernetes.io/projected/49fa0781-199d-40a5-958f-591c534f25cc-kube-api-access-ktgv8\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.981740 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.981808 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" 
(UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:28 crc kubenswrapper[4747]: I1202 17:06:28.982040 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:29 crc kubenswrapper[4747]: I1202 17:06:28.999961 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49fa0781-199d-40a5-958f-591c534f25cc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:29 crc kubenswrapper[4747]: I1202 17:06:29.032788 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktgv8\" (UniqueName: \"kubernetes.io/projected/49fa0781-199d-40a5-958f-591c534f25cc-kube-api-access-ktgv8\") pod \"nova-cell1-novncproxy-0\" (UID: \"49fa0781-199d-40a5-958f-591c534f25cc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:29 crc kubenswrapper[4747]: I1202 17:06:29.326174 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:29 crc kubenswrapper[4747]: I1202 17:06:29.773766 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3760cc83-1810-4268-a539-4d1bc6bd358c" path="/var/lib/kubelet/pods/3760cc83-1810-4268-a539-4d1bc6bd358c/volumes" Dec 02 17:06:29 crc kubenswrapper[4747]: I1202 17:06:29.882361 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 17:06:29 crc kubenswrapper[4747]: W1202 17:06:29.884550 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49fa0781_199d_40a5_958f_591c534f25cc.slice/crio-56b49d292b8331ac7ba500585b4aaeb6f839470dabcdfcb67aafdde2e36c31ea WatchSource:0}: Error finding container 56b49d292b8331ac7ba500585b4aaeb6f839470dabcdfcb67aafdde2e36c31ea: Status 404 returned error can't find the container with id 56b49d292b8331ac7ba500585b4aaeb6f839470dabcdfcb67aafdde2e36c31ea Dec 02 17:06:30 crc kubenswrapper[4747]: I1202 17:06:30.624327 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"49fa0781-199d-40a5-958f-591c534f25cc","Type":"ContainerStarted","Data":"5341b30a34156a1e6ecca039e2130cea9977827294aca7be5e8a1e69443adc35"} Dec 02 17:06:30 crc kubenswrapper[4747]: I1202 17:06:30.624876 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"49fa0781-199d-40a5-958f-591c534f25cc","Type":"ContainerStarted","Data":"56b49d292b8331ac7ba500585b4aaeb6f839470dabcdfcb67aafdde2e36c31ea"} Dec 02 17:06:30 crc kubenswrapper[4747]: I1202 17:06:30.649571 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.649547609 podStartE2EDuration="2.649547609s" podCreationTimestamp="2025-12-02 17:06:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:30.645298978 +0000 UTC m=+1421.172187737" watchObservedRunningTime="2025-12-02 17:06:30.649547609 +0000 UTC m=+1421.176436358" Dec 02 17:06:31 crc 
Dec 02 17:06:31 crc kubenswrapper[4747]: I1202 17:06:31.795646 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:06:31 crc kubenswrapper[4747]: I1202 17:06:31.797422 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.726700 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.727250 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.727605 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.727630 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.729779 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.733582 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.946186 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-vnnlk"]
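The two probe entries above show the kubelet's HTTP liveness check failing closed: a GET to the container's health endpoint that cannot even connect counts as a probe failure. A standalone Go sketch of the same kind of check (the one-second timeout is an assumption; the kubelet treats any 2xx/3xx status as success):

    package main

    import (
    	"fmt"
    	"net/http"
    	"os"
    	"time"
    )

    // probe mirrors the prober's behavior in the entries above: connection
    // errors and non-2xx/3xx statuses are both reported as failures.
    func probe(url string) error {
    	client := &http.Client{Timeout: time.Second}
    	resp, err := client.Get(url)
    	if err != nil {
    		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
    	}
    	defer resp.Body.Close()
    	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
    		return fmt.Errorf("unexpected status %d", resp.StatusCode)
    	}
    	return nil
    }

    func main() {
    	if err := probe("http://127.0.0.1:8798/health"); err != nil {
    		fmt.Fprintln(os.Stderr, "Probe failed:", err)
    		os.Exit(1)
    	}
    	fmt.Println("probe succeeded")
    }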
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.977084 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.977165 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-config\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.977339 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.977401 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzzj8\" (UniqueName: \"kubernetes.io/projected/df86854d-111c-4325-97f5-cac39132f37e-kube-api-access-lzzj8\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.977455 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.977478 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:32 crc kubenswrapper[4747]: I1202 17:06:32.984745 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-vnnlk"] Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.082155 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.082209 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.082371 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.082405 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-config\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.082464 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.082537 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzzj8\" (UniqueName: \"kubernetes.io/projected/df86854d-111c-4325-97f5-cac39132f37e-kube-api-access-lzzj8\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.083115 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.083696 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-config\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.083707 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.085656 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.086491 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.146958 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzzj8\" (UniqueName: 
\"kubernetes.io/projected/df86854d-111c-4325-97f5-cac39132f37e-kube-api-access-lzzj8\") pod \"dnsmasq-dns-59cf4bdb65-vnnlk\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.290677 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:33 crc kubenswrapper[4747]: I1202 17:06:33.855600 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-vnnlk"] Dec 02 17:06:34 crc kubenswrapper[4747]: I1202 17:06:34.326838 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:34 crc kubenswrapper[4747]: I1202 17:06:34.670258 4747 generic.go:334] "Generic (PLEG): container finished" podID="df86854d-111c-4325-97f5-cac39132f37e" containerID="65f8887f816711826612bb51e2c00a42740b05742a34f625221524d3906e4a69" exitCode=0 Dec 02 17:06:34 crc kubenswrapper[4747]: I1202 17:06:34.670348 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" event={"ID":"df86854d-111c-4325-97f5-cac39132f37e","Type":"ContainerDied","Data":"65f8887f816711826612bb51e2c00a42740b05742a34f625221524d3906e4a69"} Dec 02 17:06:34 crc kubenswrapper[4747]: I1202 17:06:34.670463 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" event={"ID":"df86854d-111c-4325-97f5-cac39132f37e","Type":"ContainerStarted","Data":"119a03b4df4c4b7cb5d2ffba740df86d22add556e616f3ffba2f5097f9d9b540"} Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.145250 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.145950 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-central-agent" containerID="cri-o://f154278e68c0d8865f4975fe7f10168f6735572928c093004a4a3c97b9b49a73" gracePeriod=30 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.146039 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="sg-core" containerID="cri-o://220a3f132c69cd51368cc8c23d49216fbebcace9730e004933cb0d48fdc6d2aa" gracePeriod=30 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.146065 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="proxy-httpd" containerID="cri-o://2febcd4dd0100d63e9405fc9cd506555474c1ce45eb85a74c10ed188b1d706fb" gracePeriod=30 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.146063 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-notification-agent" containerID="cri-o://25701f487f89302eda33a38baa35fdf4901707d3355bf5e9db0c0f7e9339a0cb" gracePeriod=30 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.387128 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.692275 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" 
event={"ID":"df86854d-111c-4325-97f5-cac39132f37e","Type":"ContainerStarted","Data":"f1ba76e6c515bbe36d1b7944258202e6aa1b8956ca7842520ea1803fd6bc3d62"} Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.692954 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.696430 4747 generic.go:334] "Generic (PLEG): container finished" podID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerID="2febcd4dd0100d63e9405fc9cd506555474c1ce45eb85a74c10ed188b1d706fb" exitCode=0 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.696483 4747 generic.go:334] "Generic (PLEG): container finished" podID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerID="220a3f132c69cd51368cc8c23d49216fbebcace9730e004933cb0d48fdc6d2aa" exitCode=2 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.696452 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerDied","Data":"2febcd4dd0100d63e9405fc9cd506555474c1ce45eb85a74c10ed188b1d706fb"} Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.696521 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerDied","Data":"220a3f132c69cd51368cc8c23d49216fbebcace9730e004933cb0d48fdc6d2aa"} Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.696792 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-log" containerID="cri-o://3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355" gracePeriod=30 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.696857 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-api" containerID="cri-o://c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e" gracePeriod=30 Dec 02 17:06:35 crc kubenswrapper[4747]: I1202 17:06:35.735393 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" podStartSLOduration=3.735358329 podStartE2EDuration="3.735358329s" podCreationTimestamp="2025-12-02 17:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:35.720781854 +0000 UTC m=+1426.247670613" watchObservedRunningTime="2025-12-02 17:06:35.735358329 +0000 UTC m=+1426.262247078" Dec 02 17:06:35 crc kubenswrapper[4747]: E1202 17:06:35.864481 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1e77c54_c815_4816_8e92_1af63903dfe2.slice/crio-f154278e68c0d8865f4975fe7f10168f6735572928c093004a4a3c97b9b49a73.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf844f614_cade_44e0_af45_c7f1f3df847c.slice/crio-3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1e77c54_c815_4816_8e92_1af63903dfe2.slice/crio-conmon-f154278e68c0d8865f4975fe7f10168f6735572928c093004a4a3c97b9b49a73.scope\": RecentStats: 
unable to find data in memory cache]" Dec 02 17:06:36 crc kubenswrapper[4747]: I1202 17:06:36.708921 4747 generic.go:334] "Generic (PLEG): container finished" podID="f844f614-cade-44e0-af45-c7f1f3df847c" containerID="3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355" exitCode=143 Dec 02 17:06:36 crc kubenswrapper[4747]: I1202 17:06:36.708948 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f844f614-cade-44e0-af45-c7f1f3df847c","Type":"ContainerDied","Data":"3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355"} Dec 02 17:06:36 crc kubenswrapper[4747]: I1202 17:06:36.712496 4747 generic.go:334] "Generic (PLEG): container finished" podID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerID="f154278e68c0d8865f4975fe7f10168f6735572928c093004a4a3c97b9b49a73" exitCode=0 Dec 02 17:06:36 crc kubenswrapper[4747]: I1202 17:06:36.712605 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerDied","Data":"f154278e68c0d8865f4975fe7f10168f6735572928c093004a4a3c97b9b49a73"} Dec 02 17:06:38 crc kubenswrapper[4747]: I1202 17:06:38.743386 4747 generic.go:334] "Generic (PLEG): container finished" podID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerID="25701f487f89302eda33a38baa35fdf4901707d3355bf5e9db0c0f7e9339a0cb" exitCode=0 Dec 02 17:06:38 crc kubenswrapper[4747]: I1202 17:06:38.743429 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerDied","Data":"25701f487f89302eda33a38baa35fdf4901707d3355bf5e9db0c0f7e9339a0cb"} Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.326833 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.385727 4747 util.go:48] "No ready sandbox for pod can be found. 
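The exit codes above follow the usual graceful-termination pattern: the kubelet kills each container with gracePeriod=30, the runtime delivers SIGTERM, and nova-api-log's exitCode=143 is the conventional 128+15 for a process that died on SIGTERM, while containers that shut down cleanly report exitCode=0 (sg-core's exitCode=2 is its own application-level failure code). A hedged Go sketch of a process that exits promptly on SIGTERM instead of running out the grace period and being SIGKILLed:

    package main

    import (
    	"fmt"
    	"os"
    	"os/signal"
    	"syscall"
    )

    func main() {
    	// Catch the SIGTERM the runtime sends at the start of the grace period.
    	sigs := make(chan os.Signal, 1)
    	signal.Notify(sigs, syscall.SIGTERM)

    	fmt.Println("serving; waiting for SIGTERM")
    	<-sigs

    	// Flush and close resources here, well inside the 30s window.
    	fmt.Println("SIGTERM received, shutting down")
    	os.Exit(0) // an uncaught SIGTERM would instead surface as 128+15 = 143
    }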
Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.398717 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.476233 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcp9j\" (UniqueName: \"kubernetes.io/projected/f844f614-cade-44e0-af45-c7f1f3df847c-kube-api-access-bcp9j\") pod \"f844f614-cade-44e0-af45-c7f1f3df847c\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.476594 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-config-data\") pod \"f844f614-cade-44e0-af45-c7f1f3df847c\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.477095 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-combined-ca-bundle\") pod \"f844f614-cade-44e0-af45-c7f1f3df847c\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.477310 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f844f614-cade-44e0-af45-c7f1f3df847c-logs\") pod \"f844f614-cade-44e0-af45-c7f1f3df847c\" (UID: \"f844f614-cade-44e0-af45-c7f1f3df847c\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.478480 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f844f614-cade-44e0-af45-c7f1f3df847c-logs" (OuterVolumeSpecName: "logs") pod "f844f614-cade-44e0-af45-c7f1f3df847c" (UID: "f844f614-cade-44e0-af45-c7f1f3df847c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.478977 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.479703 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f844f614-cade-44e0-af45-c7f1f3df847c-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.482857 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f844f614-cade-44e0-af45-c7f1f3df847c-kube-api-access-bcp9j" (OuterVolumeSpecName: "kube-api-access-bcp9j") pod "f844f614-cade-44e0-af45-c7f1f3df847c" (UID: "f844f614-cade-44e0-af45-c7f1f3df847c"). InnerVolumeSpecName "kube-api-access-bcp9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.520978 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-config-data" (OuterVolumeSpecName: "config-data") pod "f844f614-cade-44e0-af45-c7f1f3df847c" (UID: "f844f614-cade-44e0-af45-c7f1f3df847c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.523000 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f844f614-cade-44e0-af45-c7f1f3df847c" (UID: "f844f614-cade-44e0-af45-c7f1f3df847c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.580723 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-sg-core-conf-yaml\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.580898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-ceilometer-tls-certs\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.580945 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-scripts\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.581021 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-config-data\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.581092 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-log-httpd\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.581186 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svz7k\" (UniqueName: \"kubernetes.io/projected/e1e77c54-c815-4816-8e92-1af63903dfe2-kube-api-access-svz7k\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.581342 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-run-httpd\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.581387 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-combined-ca-bundle\") pod \"e1e77c54-c815-4816-8e92-1af63903dfe2\" (UID: \"e1e77c54-c815-4816-8e92-1af63903dfe2\") " Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.582173 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcp9j\" (UniqueName: 
\"kubernetes.io/projected/f844f614-cade-44e0-af45-c7f1f3df847c-kube-api-access-bcp9j\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.582219 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.582229 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f844f614-cade-44e0-af45-c7f1f3df847c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.584384 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.584703 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.593493 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1e77c54-c815-4816-8e92-1af63903dfe2-kube-api-access-svz7k" (OuterVolumeSpecName: "kube-api-access-svz7k") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "kube-api-access-svz7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.605755 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-scripts" (OuterVolumeSpecName: "scripts") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.650116 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.684764 4747 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.684803 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svz7k\" (UniqueName: \"kubernetes.io/projected/e1e77c54-c815-4816-8e92-1af63903dfe2-kube-api-access-svz7k\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.684816 4747 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e1e77c54-c815-4816-8e92-1af63903dfe2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.684828 4747 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.684845 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.705191 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.713030 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.740240 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-config-data" (OuterVolumeSpecName: "config-data") pod "e1e77c54-c815-4816-8e92-1af63903dfe2" (UID: "e1e77c54-c815-4816-8e92-1af63903dfe2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.767320 4747 generic.go:334] "Generic (PLEG): container finished" podID="f844f614-cade-44e0-af45-c7f1f3df847c" containerID="c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e" exitCode=0 Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.777208 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.777294 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.778929 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e1e77c54-c815-4816-8e92-1af63903dfe2","Type":"ContainerDied","Data":"2e00ee83d882a0d5c3230b16f0ff48ddb2065cbee8faeae94d084747c71cc2aa"} Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.779367 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f844f614-cade-44e0-af45-c7f1f3df847c","Type":"ContainerDied","Data":"c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e"} Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.779491 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f844f614-cade-44e0-af45-c7f1f3df847c","Type":"ContainerDied","Data":"883eba882453479ae58febf015fe5f63def6e1a659415f5376ef769101a560ee"} Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.779610 4747 scope.go:117] "RemoveContainer" containerID="2febcd4dd0100d63e9405fc9cd506555474c1ce45eb85a74c10ed188b1d706fb" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.787587 4747 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.787620 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.787629 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1e77c54-c815-4816-8e92-1af63903dfe2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.810785 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.835894 4747 scope.go:117] "RemoveContainer" containerID="220a3f132c69cd51368cc8c23d49216fbebcace9730e004933cb0d48fdc6d2aa" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.865820 4747 scope.go:117] "RemoveContainer" containerID="25701f487f89302eda33a38baa35fdf4901707d3355bf5e9db0c0f7e9339a0cb" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.927774 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.935337 4747 scope.go:117] "RemoveContainer" containerID="f154278e68c0d8865f4975fe7f10168f6735572928c093004a4a3c97b9b49a73" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.951839 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.964651 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.974738 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.975480 4747 scope.go:117] "RemoveContainer" containerID="c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e" Dec 02 17:06:39 crc kubenswrapper[4747]: E1202 17:06:39.980841 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="proxy-httpd" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.980918 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="proxy-httpd" Dec 02 17:06:39 crc kubenswrapper[4747]: E1202 17:06:39.980946 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-notification-agent" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.980953 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-notification-agent" Dec 02 17:06:39 crc kubenswrapper[4747]: E1202 17:06:39.980968 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-log" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.980976 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-log" Dec 02 17:06:39 crc kubenswrapper[4747]: E1202 17:06:39.981007 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-api" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981016 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-api" Dec 02 17:06:39 crc kubenswrapper[4747]: E1202 17:06:39.981029 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-central-agent" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981040 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-central-agent" Dec 02 17:06:39 crc kubenswrapper[4747]: E1202 17:06:39.981067 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="sg-core" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981076 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="sg-core" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981473 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-central-agent" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981501 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-api" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981520 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="sg-core" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981529 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="proxy-httpd" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981710 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" containerName="ceilometer-notification-agent" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.981724 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" containerName="nova-api-log" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.986239 
4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.990568 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.990802 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.990972 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 17:06:39 crc kubenswrapper[4747]: I1202 17:06:39.995431 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.007050 4747 scope.go:117] "RemoveContainer" containerID="3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.007159 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.017741 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s25km\" (UniqueName: \"kubernetes.io/projected/85c831d3-50c4-484d-835c-cdf168072a4e-kube-api-access-s25km\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.018048 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85c831d3-50c4-484d-835c-cdf168072a4e-logs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.018196 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-public-tls-certs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.018389 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.018638 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-config-data\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.018729 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.020831 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.023367 4747 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.026328 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.029231 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.029672 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.034198 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.053327 4747 scope.go:117] "RemoveContainer" containerID="c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e" Dec 02 17:06:40 crc kubenswrapper[4747]: E1202 17:06:40.055374 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e\": container with ID starting with c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e not found: ID does not exist" containerID="c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.055416 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e"} err="failed to get container status \"c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e\": rpc error: code = NotFound desc = could not find container \"c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e\": container with ID starting with c8312e20623d52f35bfdea34a144c692e1a02a4cdbe6c2117d05c1bb5de7ba6e not found: ID does not exist" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.055444 4747 scope.go:117] "RemoveContainer" containerID="3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355" Dec 02 17:06:40 crc kubenswrapper[4747]: E1202 17:06:40.055722 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355\": container with ID starting with 3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355 not found: ID does not exist" containerID="3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.055746 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355"} err="failed to get container status \"3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355\": rpc error: code = NotFound desc = could not find container \"3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355\": container with ID starting with 3de2d67c4079a243fc66fe0bdd58120f71797806e79946551e02c21eaf78a355 not found: ID does not exist" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120665 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0" Dec 02 
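The paired "ContainerStatus from runtime service failed" / "DeleteContainer returned error" lines above are benign: the old nova-api containers were already gone, so the CRI lookup comes back with gRPC code NotFound and the kubelet simply logs it and moves on. A minimal Go sketch of the standard way to recognize that case with the grpc status package:

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // isNotFound reports whether an RPC error carries gRPC code NotFound,
    // the "already removed" condition tolerated in the entries above.
    func isNotFound(err error) bool {
    	return status.Code(err) == codes.NotFound
    }

    func main() {
    	// Simulated runtime response shaped like the logged error.
    	err := status.Error(codes.NotFound, "could not find container")
    	fmt.Println(isNotFound(err)) // true
    }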
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120717 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-config-data\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120738 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120763 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120793 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s25km\" (UniqueName: \"kubernetes.io/projected/85c831d3-50c4-484d-835c-cdf168072a4e-kube-api-access-s25km\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120819 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-run-httpd\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120853 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85c831d3-50c4-484d-835c-cdf168072a4e-logs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120879 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120897 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-scripts\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120929 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-config-data\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120943 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-log-httpd\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.120979 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.121010 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-public-tls-certs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.121029 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5dmb\" (UniqueName: \"kubernetes.io/projected/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-kube-api-access-r5dmb\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.121818 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85c831d3-50c4-484d-835c-cdf168072a4e-logs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.126587 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.127043 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.128328 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-config-data\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.139286 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-public-tls-certs\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.148797 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s25km\" (UniqueName: \"kubernetes.io/projected/85c831d3-50c4-484d-835c-cdf168072a4e-kube-api-access-s25km\") pod \"nova-api-0\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " pod="openstack/nova-api-0"
Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.154666 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-zlcpr"]
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.159018 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.159541 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.171177 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zlcpr"] Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.222763 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.222851 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.222890 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-config-data\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.222930 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-run-httpd\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.222975 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.222994 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-config-data\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.223010 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-scripts\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.223027 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-log-httpd\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 
17:06:40.223044 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-scripts\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.223108 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.223132 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l76h8\" (UniqueName: \"kubernetes.io/projected/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-kube-api-access-l76h8\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.223161 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5dmb\" (UniqueName: \"kubernetes.io/projected/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-kube-api-access-r5dmb\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.224030 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-run-httpd\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.224055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-log-httpd\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.228439 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.228708 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.229003 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-scripts\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.229632 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " 
pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.229762 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-config-data\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.245437 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5dmb\" (UniqueName: \"kubernetes.io/projected/3feb84af-eb5c-4165-ba8b-b0c55cd2c369-kube-api-access-r5dmb\") pod \"ceilometer-0\" (UID: \"3feb84af-eb5c-4165-ba8b-b0c55cd2c369\") " pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.323470 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.325242 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.325956 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-config-data\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.326099 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-scripts\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.326203 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l76h8\" (UniqueName: \"kubernetes.io/projected/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-kube-api-access-l76h8\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.329566 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-config-data\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.330389 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.331728 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-scripts\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " 
pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.342631 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l76h8\" (UniqueName: \"kubernetes.io/projected/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-kube-api-access-l76h8\") pod \"nova-cell1-cell-mapping-zlcpr\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.353809 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.496432 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.795500 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:40 crc kubenswrapper[4747]: I1202 17:06:40.963365 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 17:06:40 crc kubenswrapper[4747]: W1202 17:06:40.975383 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3feb84af_eb5c_4165_ba8b_b0c55cd2c369.slice/crio-f8fca91f9fb197558a9ad1052ff14c67bec2feea69893f5d4587a54df9cb1e11 WatchSource:0}: Error finding container f8fca91f9fb197558a9ad1052ff14c67bec2feea69893f5d4587a54df9cb1e11: Status 404 returned error can't find the container with id f8fca91f9fb197558a9ad1052ff14c67bec2feea69893f5d4587a54df9cb1e11 Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.085835 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-zlcpr"] Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.793626 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1e77c54-c815-4816-8e92-1af63903dfe2" path="/var/lib/kubelet/pods/e1e77c54-c815-4816-8e92-1af63903dfe2/volumes" Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.797385 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f844f614-cade-44e0-af45-c7f1f3df847c" path="/var/lib/kubelet/pods/f844f614-cade-44e0-af45-c7f1f3df847c/volumes" Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.827602 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3feb84af-eb5c-4165-ba8b-b0c55cd2c369","Type":"ContainerStarted","Data":"f8fca91f9fb197558a9ad1052ff14c67bec2feea69893f5d4587a54df9cb1e11"} Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.831619 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85c831d3-50c4-484d-835c-cdf168072a4e","Type":"ContainerStarted","Data":"5f221d3cd5faad650c524387a7ab2bf9d76d1f66fd80b1591e29d4ff3d70b22e"} Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.831656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85c831d3-50c4-484d-835c-cdf168072a4e","Type":"ContainerStarted","Data":"a462fbc7d3225429537e987ca568fc15b887f9f33e954fe4cc55b9967b94837f"} Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.831666 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85c831d3-50c4-484d-835c-cdf168072a4e","Type":"ContainerStarted","Data":"843ea71d4c22fedc5d018340f65beead861f4aee9513364390eda7d9a7c1aec4"} Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.835610 4747 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zlcpr" event={"ID":"b5c1a898-e20a-4ac8-bf8f-d798a2298f57","Type":"ContainerStarted","Data":"c18cbe7b76679569e3b73dfeb3211e67d0dc62590f2ad71570275bc3b48bad7e"} Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.835666 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zlcpr" event={"ID":"b5c1a898-e20a-4ac8-bf8f-d798a2298f57","Type":"ContainerStarted","Data":"2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9"} Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.862771 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.8627421870000003 podStartE2EDuration="2.862742187s" podCreationTimestamp="2025-12-02 17:06:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:41.855029468 +0000 UTC m=+1432.381918217" watchObservedRunningTime="2025-12-02 17:06:41.862742187 +0000 UTC m=+1432.389630936" Dec 02 17:06:41 crc kubenswrapper[4747]: I1202 17:06:41.875818 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-zlcpr" podStartSLOduration=1.8757907390000002 podStartE2EDuration="1.875790739s" podCreationTimestamp="2025-12-02 17:06:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:41.875084479 +0000 UTC m=+1432.401973228" watchObservedRunningTime="2025-12-02 17:06:41.875790739 +0000 UTC m=+1432.402679488" Dec 02 17:06:42 crc kubenswrapper[4747]: I1202 17:06:42.864955 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3feb84af-eb5c-4165-ba8b-b0c55cd2c369","Type":"ContainerStarted","Data":"48681f07e69083ade35ddf1a850afffcf8b813498b81bdfaacc194ea9df54887"} Dec 02 17:06:42 crc kubenswrapper[4747]: I1202 17:06:42.865043 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3feb84af-eb5c-4165-ba8b-b0c55cd2c369","Type":"ContainerStarted","Data":"1d16b383a2089f921d7cec7c13bd873858ba6dc9a48ac0de4841df6ee72198b9"} Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.293283 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.374767 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-44bgt"] Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.375203 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerName="dnsmasq-dns" containerID="cri-o://88c3ca76ceccfbd890fb3653fffe5ffb95ab445de2f87eca618b9259ccb9267b" gracePeriod=10 Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.390440 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.191:5353: connect: connection refused" Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.881280 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"3feb84af-eb5c-4165-ba8b-b0c55cd2c369","Type":"ContainerStarted","Data":"2fa6080799d081b452179ae679c3b99aa4a687e148aa4a92d1e370d79b5113d6"} Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.884166 4747 generic.go:334] "Generic (PLEG): container finished" podID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerID="88c3ca76ceccfbd890fb3653fffe5ffb95ab445de2f87eca618b9259ccb9267b" exitCode=0 Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.884219 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" event={"ID":"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738","Type":"ContainerDied","Data":"88c3ca76ceccfbd890fb3653fffe5ffb95ab445de2f87eca618b9259ccb9267b"} Dec 02 17:06:43 crc kubenswrapper[4747]: I1202 17:06:43.984984 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.080829 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z52j\" (UniqueName: \"kubernetes.io/projected/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-kube-api-access-5z52j\") pod \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.080960 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-swift-storage-0\") pod \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.081026 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-nb\") pod \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.081099 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-svc\") pod \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.081183 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-config\") pod \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.081221 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-sb\") pod \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\" (UID: \"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738\") " Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.106921 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-kube-api-access-5z52j" (OuterVolumeSpecName: "kube-api-access-5z52j") pod "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" (UID: "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738"). InnerVolumeSpecName "kube-api-access-5z52j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.180704 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" (UID: "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.181088 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" (UID: "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.184364 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z52j\" (UniqueName: \"kubernetes.io/projected/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-kube-api-access-5z52j\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.184471 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.184552 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.192877 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" (UID: "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.198385 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-config" (OuterVolumeSpecName: "config") pod "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" (UID: "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.205496 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" (UID: "ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.286607 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.286670 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.286694 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.899630 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" event={"ID":"ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738","Type":"ContainerDied","Data":"e2bd3a21828e49a0c3f8a2f1b771b9b7378c5b1a4231f5c1a4f7459b126e7061"} Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.899683 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-44bgt" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.899690 4747 scope.go:117] "RemoveContainer" containerID="88c3ca76ceccfbd890fb3653fffe5ffb95ab445de2f87eca618b9259ccb9267b" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.938637 4747 scope.go:117] "RemoveContainer" containerID="f194db040efb4e2049154dfe398a736516082470ebf13c519a208632d7675976" Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.954741 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-44bgt"] Dec 02 17:06:44 crc kubenswrapper[4747]: I1202 17:06:44.968939 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-44bgt"] Dec 02 17:06:45 crc kubenswrapper[4747]: I1202 17:06:45.773453 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" path="/var/lib/kubelet/pods/ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738/volumes" Dec 02 17:06:45 crc kubenswrapper[4747]: I1202 17:06:45.917061 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3feb84af-eb5c-4165-ba8b-b0c55cd2c369","Type":"ContainerStarted","Data":"f670204bf2f54aaca9becaed69ba808ff56ae57f638ba6f959caf0528d45d11d"} Dec 02 17:06:45 crc kubenswrapper[4747]: I1202 17:06:45.917216 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 17:06:45 crc kubenswrapper[4747]: I1202 17:06:45.947496 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.690955316 podStartE2EDuration="6.947465419s" podCreationTimestamp="2025-12-02 17:06:39 +0000 UTC" firstStartedPulling="2025-12-02 17:06:40.97862381 +0000 UTC m=+1431.505512559" lastFinishedPulling="2025-12-02 17:06:45.235133913 +0000 UTC m=+1435.762022662" observedRunningTime="2025-12-02 17:06:45.943881347 +0000 UTC m=+1436.470770096" watchObservedRunningTime="2025-12-02 17:06:45.947465419 +0000 UTC m=+1436.474354168" Dec 02 17:06:47 crc kubenswrapper[4747]: I1202 17:06:47.946126 4747 generic.go:334] "Generic (PLEG): container finished" podID="b5c1a898-e20a-4ac8-bf8f-d798a2298f57" 
containerID="c18cbe7b76679569e3b73dfeb3211e67d0dc62590f2ad71570275bc3b48bad7e" exitCode=0 Dec 02 17:06:47 crc kubenswrapper[4747]: I1202 17:06:47.946377 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zlcpr" event={"ID":"b5c1a898-e20a-4ac8-bf8f-d798a2298f57","Type":"ContainerDied","Data":"c18cbe7b76679569e3b73dfeb3211e67d0dc62590f2ad71570275bc3b48bad7e"} Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.409139 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.451383 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-scripts\") pod \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.451602 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l76h8\" (UniqueName: \"kubernetes.io/projected/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-kube-api-access-l76h8\") pod \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.451717 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-combined-ca-bundle\") pod \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.451757 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-config-data\") pod \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\" (UID: \"b5c1a898-e20a-4ac8-bf8f-d798a2298f57\") " Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.462871 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-scripts" (OuterVolumeSpecName: "scripts") pod "b5c1a898-e20a-4ac8-bf8f-d798a2298f57" (UID: "b5c1a898-e20a-4ac8-bf8f-d798a2298f57"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.464479 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-kube-api-access-l76h8" (OuterVolumeSpecName: "kube-api-access-l76h8") pod "b5c1a898-e20a-4ac8-bf8f-d798a2298f57" (UID: "b5c1a898-e20a-4ac8-bf8f-d798a2298f57"). InnerVolumeSpecName "kube-api-access-l76h8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.490178 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-config-data" (OuterVolumeSpecName: "config-data") pod "b5c1a898-e20a-4ac8-bf8f-d798a2298f57" (UID: "b5c1a898-e20a-4ac8-bf8f-d798a2298f57"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.494733 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5c1a898-e20a-4ac8-bf8f-d798a2298f57" (UID: "b5c1a898-e20a-4ac8-bf8f-d798a2298f57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.555998 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.556042 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l76h8\" (UniqueName: \"kubernetes.io/projected/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-kube-api-access-l76h8\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.556055 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.556068 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5c1a898-e20a-4ac8-bf8f-d798a2298f57-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.970938 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-zlcpr" event={"ID":"b5c1a898-e20a-4ac8-bf8f-d798a2298f57","Type":"ContainerDied","Data":"2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9"} Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.971444 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-zlcpr" Dec 02 17:06:49 crc kubenswrapper[4747]: I1202 17:06:49.971474 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9" Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.277503 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.278027 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-log" containerID="cri-o://a462fbc7d3225429537e987ca568fc15b887f9f33e954fe4cc55b9967b94837f" gracePeriod=30 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.278165 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-api" containerID="cri-o://5f221d3cd5faad650c524387a7ab2bf9d76d1f66fd80b1591e29d4ff3d70b22e" gracePeriod=30 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.289897 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.290386 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="ae6eb70b-b206-4849-8120-bc08f4318de1" containerName="nova-scheduler-scheduler" containerID="cri-o://2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2" gracePeriod=30 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.331265 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.331614 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-log" containerID="cri-o://bcb8882aab554a7e57c619592e3c367a37fcaa670315509ba4ac42a8b278505c" gracePeriod=30 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.331974 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-metadata" containerID="cri-o://58e1b0a64205d98eeebd6dbe46c64b37c085d5e5c2bc03f0d12a06772bca7d73" gracePeriod=30 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.983660 4747 generic.go:334] "Generic (PLEG): container finished" podID="6fbb7413-56c4-4018-9e53-584b138878c1" containerID="bcb8882aab554a7e57c619592e3c367a37fcaa670315509ba4ac42a8b278505c" exitCode=143 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.983963 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6fbb7413-56c4-4018-9e53-584b138878c1","Type":"ContainerDied","Data":"bcb8882aab554a7e57c619592e3c367a37fcaa670315509ba4ac42a8b278505c"} Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.986456 4747 generic.go:334] "Generic (PLEG): container finished" podID="85c831d3-50c4-484d-835c-cdf168072a4e" containerID="5f221d3cd5faad650c524387a7ab2bf9d76d1f66fd80b1591e29d4ff3d70b22e" exitCode=0 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.986477 4747 generic.go:334] "Generic (PLEG): container finished" podID="85c831d3-50c4-484d-835c-cdf168072a4e" containerID="a462fbc7d3225429537e987ca568fc15b887f9f33e954fe4cc55b9967b94837f" 
exitCode=143 Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.986495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85c831d3-50c4-484d-835c-cdf168072a4e","Type":"ContainerDied","Data":"5f221d3cd5faad650c524387a7ab2bf9d76d1f66fd80b1591e29d4ff3d70b22e"} Dec 02 17:06:50 crc kubenswrapper[4747]: I1202 17:06:50.986512 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85c831d3-50c4-484d-835c-cdf168072a4e","Type":"ContainerDied","Data":"a462fbc7d3225429537e987ca568fc15b887f9f33e954fe4cc55b9967b94837f"} Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:51.587665 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:51.591035 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:51.594330 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:51.594372 4747 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="ae6eb70b-b206-4849-8120-bc08f4318de1" containerName="nova-scheduler-scheduler" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.791819 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.821144 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-combined-ca-bundle\") pod \"85c831d3-50c4-484d-835c-cdf168072a4e\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.821317 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-internal-tls-certs\") pod \"85c831d3-50c4-484d-835c-cdf168072a4e\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.821487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s25km\" (UniqueName: \"kubernetes.io/projected/85c831d3-50c4-484d-835c-cdf168072a4e-kube-api-access-s25km\") pod \"85c831d3-50c4-484d-835c-cdf168072a4e\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.821708 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85c831d3-50c4-484d-835c-cdf168072a4e-logs\") pod \"85c831d3-50c4-484d-835c-cdf168072a4e\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.824128 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-public-tls-certs\") pod \"85c831d3-50c4-484d-835c-cdf168072a4e\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.824325 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-config-data\") pod \"85c831d3-50c4-484d-835c-cdf168072a4e\" (UID: \"85c831d3-50c4-484d-835c-cdf168072a4e\") " Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.829941 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85c831d3-50c4-484d-835c-cdf168072a4e-logs" (OuterVolumeSpecName: "logs") pod "85c831d3-50c4-484d-835c-cdf168072a4e" (UID: "85c831d3-50c4-484d-835c-cdf168072a4e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.865086 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85c831d3-50c4-484d-835c-cdf168072a4e" (UID: "85c831d3-50c4-484d-835c-cdf168072a4e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.867014 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85c831d3-50c4-484d-835c-cdf168072a4e-kube-api-access-s25km" (OuterVolumeSpecName: "kube-api-access-s25km") pod "85c831d3-50c4-484d-835c-cdf168072a4e" (UID: "85c831d3-50c4-484d-835c-cdf168072a4e"). InnerVolumeSpecName "kube-api-access-s25km". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.878668 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-config-data" (OuterVolumeSpecName: "config-data") pod "85c831d3-50c4-484d-835c-cdf168072a4e" (UID: "85c831d3-50c4-484d-835c-cdf168072a4e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.891058 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "85c831d3-50c4-484d-835c-cdf168072a4e" (UID: "85c831d3-50c4-484d-835c-cdf168072a4e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.902087 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "85c831d3-50c4-484d-835c-cdf168072a4e" (UID: "85c831d3-50c4-484d-835c-cdf168072a4e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.930187 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/85c831d3-50c4-484d-835c-cdf168072a4e-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.930221 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.930232 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.930242 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.930363 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/85c831d3-50c4-484d-835c-cdf168072a4e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:51.930373 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s25km\" (UniqueName: \"kubernetes.io/projected/85c831d3-50c4-484d-835c-cdf168072a4e-kube-api-access-s25km\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.010193 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"85c831d3-50c4-484d-835c-cdf168072a4e","Type":"ContainerDied","Data":"843ea71d4c22fedc5d018340f65beead861f4aee9513364390eda7d9a7c1aec4"} Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.010252 4747 scope.go:117] "RemoveContainer" containerID="5f221d3cd5faad650c524387a7ab2bf9d76d1f66fd80b1591e29d4ff3d70b22e" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.010273 4747 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.052023 4747 scope.go:117] "RemoveContainer" containerID="a462fbc7d3225429537e987ca568fc15b887f9f33e954fe4cc55b9967b94837f" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.064299 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.081857 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.097794 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:52.098511 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5c1a898-e20a-4ac8-bf8f-d798a2298f57" containerName="nova-manage" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098534 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5c1a898-e20a-4ac8-bf8f-d798a2298f57" containerName="nova-manage" Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:52.098557 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerName="dnsmasq-dns" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098564 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerName="dnsmasq-dns" Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:52.098580 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-api" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098588 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-api" Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:52.098602 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerName="init" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098610 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerName="init" Dec 02 17:06:54 crc kubenswrapper[4747]: E1202 17:06:52.098629 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-log" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098635 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-log" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098833 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-api" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098847 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5c1a898-e20a-4ac8-bf8f-d798a2298f57" containerName="nova-manage" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098861 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec60e2d4-6b4a-4c7c-a948-b3a7ed21c738" containerName="dnsmasq-dns" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.098869 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" containerName="nova-api-log" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.100747 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.106324 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.106785 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.106990 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.107470 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.136323 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.136375 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7zxg\" (UniqueName: \"kubernetes.io/projected/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-kube-api-access-r7zxg\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.136401 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-public-tls-certs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.136418 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.136467 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-config-data\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.136495 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-logs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.238867 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-config-data\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.238980 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-logs\") pod \"nova-api-0\" (UID: 
\"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.239117 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.239185 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7zxg\" (UniqueName: \"kubernetes.io/projected/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-kube-api-access-r7zxg\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.239223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-public-tls-certs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.239250 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.240394 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-logs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.244572 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-config-data\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.245008 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-internal-tls-certs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.245142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-public-tls-certs\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.247890 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.263642 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7zxg\" (UniqueName: \"kubernetes.io/projected/a264e5ce-56ed-4ffa-aee8-9951a0cdd335-kube-api-access-r7zxg\") pod \"nova-api-0\" (UID: \"a264e5ce-56ed-4ffa-aee8-9951a0cdd335\") " 
pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:52.434525 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:53.773309 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85c831d3-50c4-484d-835c-cdf168072a4e" path="/var/lib/kubelet/pods/85c831d3-50c4-484d-835c-cdf168072a4e/volumes" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:53.831550 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:41886->10.217.0.195:8775: read: connection reset by peer" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:53.831576 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:41902->10.217.0.195:8775: read: connection reset by peer" Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:54.041881 4747 generic.go:334] "Generic (PLEG): container finished" podID="ae6eb70b-b206-4849-8120-bc08f4318de1" containerID="2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2" exitCode=0 Dec 02 17:06:54 crc kubenswrapper[4747]: I1202 17:06:54.041999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ae6eb70b-b206-4849-8120-bc08f4318de1","Type":"ContainerDied","Data":"2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2"} Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.084170 4747 generic.go:334] "Generic (PLEG): container finished" podID="6fbb7413-56c4-4018-9e53-584b138878c1" containerID="58e1b0a64205d98eeebd6dbe46c64b37c085d5e5c2bc03f0d12a06772bca7d73" exitCode=0 Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.084459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6fbb7413-56c4-4018-9e53-584b138878c1","Type":"ContainerDied","Data":"58e1b0a64205d98eeebd6dbe46c64b37c085d5e5c2bc03f0d12a06772bca7d73"} Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.335514 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.343112 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.359193 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.453723 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-combined-ca-bundle\") pod \"6fbb7413-56c4-4018-9e53-584b138878c1\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.453850 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-config-data\") pod \"ae6eb70b-b206-4849-8120-bc08f4318de1\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.453943 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbb7413-56c4-4018-9e53-584b138878c1-logs\") pod \"6fbb7413-56c4-4018-9e53-584b138878c1\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.454003 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94hfg\" (UniqueName: \"kubernetes.io/projected/ae6eb70b-b206-4849-8120-bc08f4318de1-kube-api-access-94hfg\") pod \"ae6eb70b-b206-4849-8120-bc08f4318de1\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.454138 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-nova-metadata-tls-certs\") pod \"6fbb7413-56c4-4018-9e53-584b138878c1\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.454261 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjvqz\" (UniqueName: \"kubernetes.io/projected/6fbb7413-56c4-4018-9e53-584b138878c1-kube-api-access-rjvqz\") pod \"6fbb7413-56c4-4018-9e53-584b138878c1\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.454313 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-combined-ca-bundle\") pod \"ae6eb70b-b206-4849-8120-bc08f4318de1\" (UID: \"ae6eb70b-b206-4849-8120-bc08f4318de1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.454465 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-config-data\") pod \"6fbb7413-56c4-4018-9e53-584b138878c1\" (UID: \"6fbb7413-56c4-4018-9e53-584b138878c1\") " Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.469404 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fbb7413-56c4-4018-9e53-584b138878c1-logs" (OuterVolumeSpecName: "logs") pod "6fbb7413-56c4-4018-9e53-584b138878c1" (UID: "6fbb7413-56c4-4018-9e53-584b138878c1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.480551 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae6eb70b-b206-4849-8120-bc08f4318de1-kube-api-access-94hfg" (OuterVolumeSpecName: "kube-api-access-94hfg") pod "ae6eb70b-b206-4849-8120-bc08f4318de1" (UID: "ae6eb70b-b206-4849-8120-bc08f4318de1"). InnerVolumeSpecName "kube-api-access-94hfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.489412 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fbb7413-56c4-4018-9e53-584b138878c1-kube-api-access-rjvqz" (OuterVolumeSpecName: "kube-api-access-rjvqz") pod "6fbb7413-56c4-4018-9e53-584b138878c1" (UID: "6fbb7413-56c4-4018-9e53-584b138878c1"). InnerVolumeSpecName "kube-api-access-rjvqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.504243 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-config-data" (OuterVolumeSpecName: "config-data") pod "6fbb7413-56c4-4018-9e53-584b138878c1" (UID: "6fbb7413-56c4-4018-9e53-584b138878c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.527125 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-config-data" (OuterVolumeSpecName: "config-data") pod "ae6eb70b-b206-4849-8120-bc08f4318de1" (UID: "ae6eb70b-b206-4849-8120-bc08f4318de1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.538078 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fbb7413-56c4-4018-9e53-584b138878c1" (UID: "6fbb7413-56c4-4018-9e53-584b138878c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.550745 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae6eb70b-b206-4849-8120-bc08f4318de1" (UID: "ae6eb70b-b206-4849-8120-bc08f4318de1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.563066 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjvqz\" (UniqueName: \"kubernetes.io/projected/6fbb7413-56c4-4018-9e53-584b138878c1-kube-api-access-rjvqz\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.563155 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.563174 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.563192 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.563226 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae6eb70b-b206-4849-8120-bc08f4318de1-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.563243 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6fbb7413-56c4-4018-9e53-584b138878c1-logs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.563262 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94hfg\" (UniqueName: \"kubernetes.io/projected/ae6eb70b-b206-4849-8120-bc08f4318de1-kube-api-access-94hfg\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.592976 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6fbb7413-56c4-4018-9e53-584b138878c1" (UID: "6fbb7413-56c4-4018-9e53-584b138878c1"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:06:55 crc kubenswrapper[4747]: I1202 17:06:55.665873 4747 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6fbb7413-56c4-4018-9e53-584b138878c1-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.104601 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ae6eb70b-b206-4849-8120-bc08f4318de1","Type":"ContainerDied","Data":"e40ba6bc4055bb4e47f2b446fe43ceb9533858766b7c3cfef7a85961a61b562b"} Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.104672 4747 scope.go:117] "RemoveContainer" containerID="2c335faf751e2e358fdd98bbbbaabef9ab96beed72118f6f32a5bdc4fc7b52f2" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.104884 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.109878 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6fbb7413-56c4-4018-9e53-584b138878c1","Type":"ContainerDied","Data":"d325b2d7262395b831274377b460615c540f2c7abde804ded79d475293c379cd"} Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.110045 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.114674 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a264e5ce-56ed-4ffa-aee8-9951a0cdd335","Type":"ContainerStarted","Data":"03aab348d8d1b66b2a3986ddabe69daea6bcd555fc13fa636c60da38aea499da"} Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.114738 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a264e5ce-56ed-4ffa-aee8-9951a0cdd335","Type":"ContainerStarted","Data":"0f7be093307242a816715050435133db2b0712c85cfd1958359a96270eae8507"} Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.150018 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.169009 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.172279 4747 scope.go:117] "RemoveContainer" containerID="58e1b0a64205d98eeebd6dbe46c64b37c085d5e5c2bc03f0d12a06772bca7d73" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.183929 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.207108 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.238998 4747 scope.go:117] "RemoveContainer" containerID="bcb8882aab554a7e57c619592e3c367a37fcaa670315509ba4ac42a8b278505c" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.242716 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: E1202 17:06:56.243330 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-metadata" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.243354 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-metadata" Dec 02 17:06:56 crc kubenswrapper[4747]: E1202 17:06:56.243383 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae6eb70b-b206-4849-8120-bc08f4318de1" containerName="nova-scheduler-scheduler" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.243392 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae6eb70b-b206-4849-8120-bc08f4318de1" containerName="nova-scheduler-scheduler" Dec 02 17:06:56 crc kubenswrapper[4747]: E1202 17:06:56.243406 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-log" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.243414 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-log" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.243647 4747 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-metadata" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.243673 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae6eb70b-b206-4849-8120-bc08f4318de1" containerName="nova-scheduler-scheduler" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.243694 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" containerName="nova-metadata-log" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.244892 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.248698 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.259594 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.275792 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.278215 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.282374 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.282880 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.304925 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.381848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d9349be-b597-431f-badc-50d7da952f70-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.382076 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k96lq\" (UniqueName: \"kubernetes.io/projected/0d9349be-b597-431f-badc-50d7da952f70-kube-api-access-k96lq\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.382191 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.382263 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-config-data\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.382300 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfzl5\" (UniqueName: \"kubernetes.io/projected/4236f6d8-0d70-4768-8eb5-3847fafdede8-kube-api-access-wfzl5\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.382331 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d9349be-b597-431f-badc-50d7da952f70-config-data\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.382379 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4236f6d8-0d70-4768-8eb5-3847fafdede8-logs\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.382431 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.484799 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k96lq\" (UniqueName: \"kubernetes.io/projected/0d9349be-b597-431f-badc-50d7da952f70-kube-api-access-k96lq\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.484923 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.484951 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-config-data\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.484977 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfzl5\" (UniqueName: \"kubernetes.io/projected/4236f6d8-0d70-4768-8eb5-3847fafdede8-kube-api-access-wfzl5\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.484999 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d9349be-b597-431f-badc-50d7da952f70-config-data\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.485035 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4236f6d8-0d70-4768-8eb5-3847fafdede8-logs\") pod \"nova-metadata-0\" (UID: 
\"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.485072 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.485139 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d9349be-b597-431f-badc-50d7da952f70-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.485673 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4236f6d8-0d70-4768-8eb5-3847fafdede8-logs\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.492420 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.494006 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d9349be-b597-431f-badc-50d7da952f70-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.496949 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.500167 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d9349be-b597-431f-badc-50d7da952f70-config-data\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.505459 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k96lq\" (UniqueName: \"kubernetes.io/projected/0d9349be-b597-431f-badc-50d7da952f70-kube-api-access-k96lq\") pod \"nova-scheduler-0\" (UID: \"0d9349be-b597-431f-badc-50d7da952f70\") " pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.513878 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4236f6d8-0d70-4768-8eb5-3847fafdede8-config-data\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: E1202 17:06:56.523775 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice/crio-2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice\": RecentStats: unable to find data in memory cache]" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.528929 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfzl5\" (UniqueName: \"kubernetes.io/projected/4236f6d8-0d70-4768-8eb5-3847fafdede8-kube-api-access-wfzl5\") pod \"nova-metadata-0\" (UID: \"4236f6d8-0d70-4768-8eb5-3847fafdede8\") " pod="openstack/nova-metadata-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.642784 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 17:06:56 crc kubenswrapper[4747]: I1202 17:06:56.649520 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 17:06:57 crc kubenswrapper[4747]: I1202 17:06:57.133629 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a264e5ce-56ed-4ffa-aee8-9951a0cdd335","Type":"ContainerStarted","Data":"1ded1de13badecae7f26c440eeb2f2ff2f26fca758a6dafd61930df40fbb11d3"} Dec 02 17:06:57 crc kubenswrapper[4747]: I1202 17:06:57.158573 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 17:06:57 crc kubenswrapper[4747]: I1202 17:06:57.172336 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 17:06:57 crc kubenswrapper[4747]: I1202 17:06:57.181653 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=5.181622944 podStartE2EDuration="5.181622944s" podCreationTimestamp="2025-12-02 17:06:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:57.162774328 +0000 UTC m=+1447.689663077" watchObservedRunningTime="2025-12-02 17:06:57.181622944 +0000 UTC m=+1447.708511703" Dec 02 17:06:57 crc kubenswrapper[4747]: I1202 17:06:57.775640 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fbb7413-56c4-4018-9e53-584b138878c1" path="/var/lib/kubelet/pods/6fbb7413-56c4-4018-9e53-584b138878c1/volumes" Dec 02 17:06:57 crc kubenswrapper[4747]: I1202 17:06:57.776502 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae6eb70b-b206-4849-8120-bc08f4318de1" path="/var/lib/kubelet/pods/ae6eb70b-b206-4849-8120-bc08f4318de1/volumes" Dec 02 17:06:58 crc kubenswrapper[4747]: I1202 17:06:58.158043 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d9349be-b597-431f-badc-50d7da952f70","Type":"ContainerStarted","Data":"266b54b3efa26081253583e964d790354cc69b0f43d6d080326402511886c7a9"} Dec 02 17:06:58 crc kubenswrapper[4747]: I1202 17:06:58.158408 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0d9349be-b597-431f-badc-50d7da952f70","Type":"ContainerStarted","Data":"4aeaa4b8d2efa53bd301a1e7e71a442e84dc81922704c2babde9ce66bc3f34ce"} Dec 02 17:06:58 crc kubenswrapper[4747]: I1202 17:06:58.171357 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"4236f6d8-0d70-4768-8eb5-3847fafdede8","Type":"ContainerStarted","Data":"4216d003f2c6e36c2c2127b40f265f4a4c98d72983cbaee7d8daf090cc22d749"} Dec 02 17:06:58 crc kubenswrapper[4747]: I1202 17:06:58.171419 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4236f6d8-0d70-4768-8eb5-3847fafdede8","Type":"ContainerStarted","Data":"bfa1df553a085d872708b504766ebcf48abdeda6934708f2915893e9d1f5cc37"} Dec 02 17:06:58 crc kubenswrapper[4747]: I1202 17:06:58.171431 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4236f6d8-0d70-4768-8eb5-3847fafdede8","Type":"ContainerStarted","Data":"e5c189915cced8e04667006eb75339e1feff2b6e7c3fcbeb332b1575292f0518"} Dec 02 17:06:58 crc kubenswrapper[4747]: I1202 17:06:58.186581 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.186554152 podStartE2EDuration="2.186554152s" podCreationTimestamp="2025-12-02 17:06:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:58.174980623 +0000 UTC m=+1448.701869372" watchObservedRunningTime="2025-12-02 17:06:58.186554152 +0000 UTC m=+1448.713442901" Dec 02 17:06:58 crc kubenswrapper[4747]: I1202 17:06:58.202090 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.202065744 podStartE2EDuration="2.202065744s" podCreationTimestamp="2025-12-02 17:06:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:06:58.194793907 +0000 UTC m=+1448.721682666" watchObservedRunningTime="2025-12-02 17:06:58.202065744 +0000 UTC m=+1448.728954483" Dec 02 17:07:01 crc kubenswrapper[4747]: I1202 17:07:01.643162 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 17:07:01 crc kubenswrapper[4747]: I1202 17:07:01.650196 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 17:07:01 crc kubenswrapper[4747]: I1202 17:07:01.650303 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 17:07:01 crc kubenswrapper[4747]: I1202 17:07:01.795551 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:07:01 crc kubenswrapper[4747]: I1202 17:07:01.795643 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:07:02 crc kubenswrapper[4747]: I1202 17:07:02.435669 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 17:07:02 crc kubenswrapper[4747]: I1202 17:07:02.435748 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 17:07:03 crc kubenswrapper[4747]: I1202 17:07:03.451199 4747 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/nova-api-0" podUID="a264e5ce-56ed-4ffa-aee8-9951a0cdd335" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 17:07:03 crc kubenswrapper[4747]: I1202 17:07:03.451223 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a264e5ce-56ed-4ffa-aee8-9951a0cdd335" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 17:07:06 crc kubenswrapper[4747]: I1202 17:07:06.643486 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 17:07:06 crc kubenswrapper[4747]: I1202 17:07:06.653253 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 17:07:06 crc kubenswrapper[4747]: I1202 17:07:06.653665 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 17:07:06 crc kubenswrapper[4747]: I1202 17:07:06.678972 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 17:07:06 crc kubenswrapper[4747]: E1202 17:07:06.838558 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice/crio-2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice\": RecentStats: unable to find data in memory cache]" Dec 02 17:07:07 crc kubenswrapper[4747]: I1202 17:07:07.323635 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 17:07:07 crc kubenswrapper[4747]: I1202 17:07:07.654211 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4236f6d8-0d70-4768-8eb5-3847fafdede8" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 17:07:07 crc kubenswrapper[4747]: I1202 17:07:07.665303 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4236f6d8-0d70-4768-8eb5-3847fafdede8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 17:07:10 crc kubenswrapper[4747]: I1202 17:07:10.365706 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 17:07:12 crc kubenswrapper[4747]: I1202 17:07:12.446202 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 17:07:12 crc kubenswrapper[4747]: I1202 17:07:12.446728 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 17:07:12 crc kubenswrapper[4747]: I1202 17:07:12.456769 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 17:07:12 crc kubenswrapper[4747]: I1202 17:07:12.458465 4747 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 17:07:13 crc kubenswrapper[4747]: I1202 17:07:13.349516 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 17:07:13 crc kubenswrapper[4747]: I1202 17:07:13.356892 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 17:07:16 crc kubenswrapper[4747]: I1202 17:07:16.657458 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 17:07:16 crc kubenswrapper[4747]: I1202 17:07:16.659456 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 17:07:16 crc kubenswrapper[4747]: I1202 17:07:16.666337 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 17:07:16 crc kubenswrapper[4747]: I1202 17:07:16.675726 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 17:07:17 crc kubenswrapper[4747]: E1202 17:07:17.100053 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice/crio-2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice\": RecentStats: unable to find data in memory cache]" Dec 02 17:07:25 crc kubenswrapper[4747]: I1202 17:07:25.365380 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 17:07:27 crc kubenswrapper[4747]: I1202 17:07:27.152471 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 17:07:27 crc kubenswrapper[4747]: E1202 17:07:27.439758 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice/crio-2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9\": RecentStats: unable to find data in memory cache]" Dec 02 17:07:31 crc kubenswrapper[4747]: I1202 17:07:31.147933 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerName="rabbitmq" containerID="cri-o://aa833de0b6ed7ef6b94022a24ff29bef00ec6d77c118291379bb3bd57001aff5" gracePeriod=604795 Dec 02 17:07:31 crc kubenswrapper[4747]: I1202 17:07:31.795746 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:07:31 crc kubenswrapper[4747]: I1202 17:07:31.796113 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:07:31 crc kubenswrapper[4747]: I1202 17:07:31.796163 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:07:31 crc kubenswrapper[4747]: I1202 17:07:31.797012 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a9cd5048f2dad4e3a491049097b9d8740c67cd00c6933cd94235f0d88bac7953"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:07:31 crc kubenswrapper[4747]: I1202 17:07:31.797064 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://a9cd5048f2dad4e3a491049097b9d8740c67cd00c6933cd94235f0d88bac7953" gracePeriod=600 Dec 02 17:07:32 crc kubenswrapper[4747]: I1202 17:07:32.581108 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="a9cd5048f2dad4e3a491049097b9d8740c67cd00c6933cd94235f0d88bac7953" exitCode=0 Dec 02 17:07:32 crc kubenswrapper[4747]: I1202 17:07:32.581197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"a9cd5048f2dad4e3a491049097b9d8740c67cd00c6933cd94235f0d88bac7953"} Dec 02 17:07:32 crc kubenswrapper[4747]: I1202 17:07:32.581796 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"} Dec 02 17:07:32 crc kubenswrapper[4747]: I1202 17:07:32.581857 4747 scope.go:117] "RemoveContainer" containerID="3bcd3880ed49ebcd50724927f63a47903d028c91da930fb1b60778e7033a6140" Dec 02 17:07:32 crc kubenswrapper[4747]: I1202 17:07:32.979368 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerName="rabbitmq" containerID="cri-o://925fd0a24be60f6c05b52e5e85786f38ce9e346d6d05cc7f3f7a87f1bb5f50ec" gracePeriod=604795 Dec 02 17:07:34 crc kubenswrapper[4747]: I1202 17:07:34.947687 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Dec 02 17:07:35 crc kubenswrapper[4747]: I1202 17:07:35.056940 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.657674 4747 generic.go:334] "Generic (PLEG): container finished" podID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerID="aa833de0b6ed7ef6b94022a24ff29bef00ec6d77c118291379bb3bd57001aff5" exitCode=0 Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.657730 4747 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1e1d5029-90ff-4315-8ba4-961286afbb54","Type":"ContainerDied","Data":"aa833de0b6ed7ef6b94022a24ff29bef00ec6d77c118291379bb3bd57001aff5"} Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.749704 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 17:07:37 crc kubenswrapper[4747]: E1202 17:07:37.751079 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice/crio-2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice\": RecentStats: unable to find data in memory cache]" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.857853 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-plugins\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.857952 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-erlang-cookie\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.857995 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-confd\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858039 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858062 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-config-data\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858116 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-tls\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858218 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmsrr\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-kube-api-access-qmsrr\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858290 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-plugins-conf\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858363 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1e1d5029-90ff-4315-8ba4-961286afbb54-pod-info\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-server-conf\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858415 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1e1d5029-90ff-4315-8ba4-961286afbb54-erlang-cookie-secret\") pod \"1e1d5029-90ff-4315-8ba4-961286afbb54\" (UID: \"1e1d5029-90ff-4315-8ba4-961286afbb54\") " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858488 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.858828 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.860132 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.860776 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.867403 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.872180 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1e1d5029-90ff-4315-8ba4-961286afbb54-pod-info" (OuterVolumeSpecName: "pod-info") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.881082 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e1d5029-90ff-4315-8ba4-961286afbb54-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.940440 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-kube-api-access-qmsrr" (OuterVolumeSpecName: "kube-api-access-qmsrr") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "kube-api-access-qmsrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.950200 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.956157 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-config-data" (OuterVolumeSpecName: "config-data") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962210 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962262 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962273 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962284 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962293 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmsrr\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-kube-api-access-qmsrr\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962302 4747 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962310 4747 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1e1d5029-90ff-4315-8ba4-961286afbb54-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:37 crc kubenswrapper[4747]: I1202 17:07:37.962322 4747 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1e1d5029-90ff-4315-8ba4-961286afbb54-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.006784 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.009537 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-server-conf" (OuterVolumeSpecName: "server-conf") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.058353 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1e1d5029-90ff-4315-8ba4-961286afbb54" (UID: "1e1d5029-90ff-4315-8ba4-961286afbb54"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.064146 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1e1d5029-90ff-4315-8ba4-961286afbb54-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.064180 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.064189 4747 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/1e1d5029-90ff-4315-8ba4-961286afbb54-server-conf\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.669810 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"1e1d5029-90ff-4315-8ba4-961286afbb54","Type":"ContainerDied","Data":"fce135190c0b0ac9cc8e3dde031baaa97ae0943884a5d83f2f745f4bfa8ccbbb"} Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.670226 4747 scope.go:117] "RemoveContainer" containerID="aa833de0b6ed7ef6b94022a24ff29bef00ec6d77c118291379bb3bd57001aff5" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.670011 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.714113 4747 scope.go:117] "RemoveContainer" containerID="92a581541edb61fddec32c7a8654bf8df5774ac48809da7ae01b0513d57d6723" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.717503 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.729561 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.761174 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 17:07:38 crc kubenswrapper[4747]: E1202 17:07:38.761642 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerName="rabbitmq" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.761660 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerName="rabbitmq" Dec 02 17:07:38 crc kubenswrapper[4747]: E1202 17:07:38.761687 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerName="setup-container" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.761695 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerName="setup-container" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.761918 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" containerName="rabbitmq" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.763200 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.771670 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.771825 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.772192 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.772345 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.772519 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4rnd4" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.774865 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.778026 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.785647 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.878928 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.879058 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.879100 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.879154 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.879482 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/29f33a45-0d0b-4654-879e-94098ab4b4c5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.879580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6lnv\" (UniqueName: 
\"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-kube-api-access-n6lnv\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.880212 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.880287 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/29f33a45-0d0b-4654-879e-94098ab4b4c5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.880326 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.880356 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.880401 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-config-data\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982506 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/29f33a45-0d0b-4654-879e-94098ab4b4c5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982564 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6lnv\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-kube-api-access-n6lnv\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982606 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982666 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/29f33a45-0d0b-4654-879e-94098ab4b4c5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: 
\"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982697 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982729 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982776 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-config-data\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982860 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.982973 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.983018 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.983042 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.983569 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.983953 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.984220 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.985170 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.985430 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.988493 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.990406 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/29f33a45-0d0b-4654-879e-94098ab4b4c5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.991694 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/29f33a45-0d0b-4654-879e-94098ab4b4c5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.992084 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/29f33a45-0d0b-4654-879e-94098ab4b4c5-config-data\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:38 crc kubenswrapper[4747]: I1202 17:07:38.992499 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.012231 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6lnv\" (UniqueName: \"kubernetes.io/projected/29f33a45-0d0b-4654-879e-94098ab4b4c5-kube-api-access-n6lnv\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.017406 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"29f33a45-0d0b-4654-879e-94098ab4b4c5\") " pod="openstack/rabbitmq-server-0" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.123451 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.602201 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.687488 4747 generic.go:334] "Generic (PLEG): container finished" podID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerID="925fd0a24be60f6c05b52e5e85786f38ce9e346d6d05cc7f3f7a87f1bb5f50ec" exitCode=0 Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.687550 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"84019d16-aa94-40fc-9615-45c7d3dcb7b3","Type":"ContainerDied","Data":"925fd0a24be60f6c05b52e5e85786f38ce9e346d6d05cc7f3f7a87f1bb5f50ec"} Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.687996 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"84019d16-aa94-40fc-9615-45c7d3dcb7b3","Type":"ContainerDied","Data":"4967b8694de652e0401a6cd52fd2c37fa5856662b42954b5bd5cc00474d9d60e"} Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.688027 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4967b8694de652e0401a6cd52fd2c37fa5856662b42954b5bd5cc00474d9d60e" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.689396 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.691494 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"29f33a45-0d0b-4654-879e-94098ab4b4c5","Type":"ContainerStarted","Data":"d69a848778040c8ce76600b85e066fbc37a7272b9725da3a3b85472f02a90787"} Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.789456 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e1d5029-90ff-4315-8ba4-961286afbb54" path="/var/lib/kubelet/pods/1e1d5029-90ff-4315-8ba4-961286afbb54/volumes" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.802409 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.802515 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-erlang-cookie\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.802579 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/84019d16-aa94-40fc-9615-45c7d3dcb7b3-pod-info\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.802629 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-tls\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.802684 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/84019d16-aa94-40fc-9615-45c7d3dcb7b3-erlang-cookie-secret\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.802928 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-plugins-conf\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.802977 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhmbq\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-kube-api-access-mhmbq\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.803035 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-server-conf\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.803056 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-config-data\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.803083 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-confd\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.803147 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-plugins\") pod \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\" (UID: \"84019d16-aa94-40fc-9615-45c7d3dcb7b3\") " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.804609 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.807472 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.810386 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.811586 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/84019d16-aa94-40fc-9615-45c7d3dcb7b3-pod-info" (OuterVolumeSpecName: "pod-info") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.811877 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.812028 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.813268 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84019d16-aa94-40fc-9615-45c7d3dcb7b3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.833209 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-kube-api-access-mhmbq" (OuterVolumeSpecName: "kube-api-access-mhmbq") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "kube-api-access-mhmbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.863569 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-config-data" (OuterVolumeSpecName: "config-data") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.883724 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-server-conf" (OuterVolumeSpecName: "server-conf") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906314 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906363 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906376 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906388 4747 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/84019d16-aa94-40fc-9615-45c7d3dcb7b3-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906397 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906405 4747 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/84019d16-aa94-40fc-9615-45c7d3dcb7b3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906413 4747 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906421 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhmbq\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-kube-api-access-mhmbq\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906429 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.906438 4747 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/84019d16-aa94-40fc-9615-45c7d3dcb7b3-server-conf\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.938359 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 02 17:07:39 crc kubenswrapper[4747]: I1202 17:07:39.955943 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "84019d16-aa94-40fc-9615-45c7d3dcb7b3" (UID: "84019d16-aa94-40fc-9615-45c7d3dcb7b3"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.009400 4747 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/84019d16-aa94-40fc-9615-45c7d3dcb7b3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.009441 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.702223 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.741543 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.752529 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.786973 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 17:07:40 crc kubenswrapper[4747]: E1202 17:07:40.787486 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerName="rabbitmq" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.787509 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerName="rabbitmq" Dec 02 17:07:40 crc kubenswrapper[4747]: E1202 17:07:40.787522 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerName="setup-container" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.787530 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerName="setup-container" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.787793 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" containerName="rabbitmq" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.789167 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.791398 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.792653 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.792671 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.792952 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.793033 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-4gzcv" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.793105 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.793263 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.816577 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929413 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929436 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flvp2\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-kube-api-access-flvp2\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929482 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/11c3b905-4c74-439b-b032-a3234c0bf501-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929542 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929571 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929593 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929754 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/11c3b905-4c74-439b-b032-a3234c0bf501-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:40 crc kubenswrapper[4747]: I1202 17:07:40.929781 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.031610 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032074 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032306 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/11c3b905-4c74-439b-b032-a3234c0bf501-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032432 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032621 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032931 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032940 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flvp2\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-kube-api-access-flvp2\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.032826 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.033240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/11c3b905-4c74-439b-b032-a3234c0bf501-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.033395 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.033436 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.033521 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-server-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.033714 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.033719 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.033806 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.035044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/11c3b905-4c74-439b-b032-a3234c0bf501-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.040041 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/11c3b905-4c74-439b-b032-a3234c0bf501-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.042949 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/11c3b905-4c74-439b-b032-a3234c0bf501-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.044621 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.051638 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.055651 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flvp2\" (UniqueName: \"kubernetes.io/projected/11c3b905-4c74-439b-b032-a3234c0bf501-kube-api-access-flvp2\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.067048 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"11c3b905-4c74-439b-b032-a3234c0bf501\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.116309 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.593734 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 17:07:41 crc kubenswrapper[4747]: W1202 17:07:41.595164 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11c3b905_4c74_439b_b032_a3234c0bf501.slice/crio-6395bcf5d1b24426dc9ee6747ff5f211420aedade5b122287e14a9ad9305c982 WatchSource:0}: Error finding container 6395bcf5d1b24426dc9ee6747ff5f211420aedade5b122287e14a9ad9305c982: Status 404 returned error can't find the container with id 6395bcf5d1b24426dc9ee6747ff5f211420aedade5b122287e14a9ad9305c982 Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.713803 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"11c3b905-4c74-439b-b032-a3234c0bf501","Type":"ContainerStarted","Data":"6395bcf5d1b24426dc9ee6747ff5f211420aedade5b122287e14a9ad9305c982"} Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.716789 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"29f33a45-0d0b-4654-879e-94098ab4b4c5","Type":"ContainerStarted","Data":"e8b88cbf8ef0d4ec6c489f4a1f0b24110af8f3f8575e04ac53ce33f51e3cf9b1"} Dec 02 17:07:41 crc kubenswrapper[4747]: I1202 17:07:41.776115 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84019d16-aa94-40fc-9615-45c7d3dcb7b3" path="/var/lib/kubelet/pods/84019d16-aa94-40fc-9615-45c7d3dcb7b3/volumes" Dec 02 17:07:43 crc kubenswrapper[4747]: I1202 17:07:43.739585 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"11c3b905-4c74-439b-b032-a3234c0bf501","Type":"ContainerStarted","Data":"78aa5c111264ec1d5f25aa240f599b20702119116c17f7bff77203aa6ba5d19a"} Dec 02 17:07:43 crc kubenswrapper[4747]: I1202 17:07:43.971853 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-cqn8b"] Dec 02 17:07:43 crc kubenswrapper[4747]: I1202 17:07:43.974032 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:43 crc kubenswrapper[4747]: I1202 17:07:43.977664 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 02 17:07:43 crc kubenswrapper[4747]: I1202 17:07:43.996339 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-cqn8b"] Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.050962 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.051054 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-svc\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.051115 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-config\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.051143 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.051182 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.051224 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.051429 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9k66\" (UniqueName: \"kubernetes.io/projected/ee1079cc-b2a9-475d-a348-2d8b7d306002-kube-api-access-h9k66\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.153489 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9k66\" (UniqueName: \"kubernetes.io/projected/ee1079cc-b2a9-475d-a348-2d8b7d306002-kube-api-access-h9k66\") pod 
\"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.153549 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.153583 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-svc\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.153643 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-config\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.153665 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.153692 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.153709 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.154613 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-svc\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.154640 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.154873 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-config\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " 
pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.155063 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.155485 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.156580 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.186269 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9k66\" (UniqueName: \"kubernetes.io/projected/ee1079cc-b2a9-475d-a348-2d8b7d306002-kube-api-access-h9k66\") pod \"dnsmasq-dns-67b789f86c-cqn8b\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.304642 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:44 crc kubenswrapper[4747]: W1202 17:07:44.813996 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee1079cc_b2a9_475d_a348_2d8b7d306002.slice/crio-984d503c0f7f3d8412176a71106c5e656bcc9cdeba0c9fd18fa908eda4dbf84a WatchSource:0}: Error finding container 984d503c0f7f3d8412176a71106c5e656bcc9cdeba0c9fd18fa908eda4dbf84a: Status 404 returned error can't find the container with id 984d503c0f7f3d8412176a71106c5e656bcc9cdeba0c9fd18fa908eda4dbf84a Dec 02 17:07:44 crc kubenswrapper[4747]: I1202 17:07:44.815342 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-cqn8b"] Dec 02 17:07:45 crc kubenswrapper[4747]: I1202 17:07:45.757945 4747 generic.go:334] "Generic (PLEG): container finished" podID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerID="ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d" exitCode=0 Dec 02 17:07:45 crc kubenswrapper[4747]: I1202 17:07:45.758041 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" event={"ID":"ee1079cc-b2a9-475d-a348-2d8b7d306002","Type":"ContainerDied","Data":"ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d"} Dec 02 17:07:45 crc kubenswrapper[4747]: I1202 17:07:45.758583 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" event={"ID":"ee1079cc-b2a9-475d-a348-2d8b7d306002","Type":"ContainerStarted","Data":"984d503c0f7f3d8412176a71106c5e656bcc9cdeba0c9fd18fa908eda4dbf84a"} Dec 02 17:07:46 crc kubenswrapper[4747]: I1202 17:07:46.771326 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" 
event={"ID":"ee1079cc-b2a9-475d-a348-2d8b7d306002","Type":"ContainerStarted","Data":"d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea"} Dec 02 17:07:46 crc kubenswrapper[4747]: I1202 17:07:46.771560 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:46 crc kubenswrapper[4747]: I1202 17:07:46.797703 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" podStartSLOduration=3.797677544 podStartE2EDuration="3.797677544s" podCreationTimestamp="2025-12-02 17:07:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:07:46.789204883 +0000 UTC m=+1497.316093632" watchObservedRunningTime="2025-12-02 17:07:46.797677544 +0000 UTC m=+1497.324566283" Dec 02 17:07:48 crc kubenswrapper[4747]: E1202 17:07:48.028154 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5c1a898_e20a_4ac8_bf8f_d798a2298f57.slice/crio-2a840d1f8a7a2ed654cc5784e3bedda2a1993fcbd15a2ab0b6874dd504bb8fc9\": RecentStats: unable to find data in memory cache]" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.306768 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.400729 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-vnnlk"] Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.401833 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" podUID="df86854d-111c-4325-97f5-cac39132f37e" containerName="dnsmasq-dns" containerID="cri-o://f1ba76e6c515bbe36d1b7944258202e6aa1b8956ca7842520ea1803fd6bc3d62" gracePeriod=10 Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.611484 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-5c9gt"] Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.613499 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.648121 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-5c9gt"] Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.706611 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.706711 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.706848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8ssv\" (UniqueName: \"kubernetes.io/projected/b72c7491-0f7d-4d44-9e37-7d04f3046a38-kube-api-access-n8ssv\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.706986 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.707075 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.707114 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.707137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-config\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.811105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.811498 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-n8ssv\" (UniqueName: \"kubernetes.io/projected/b72c7491-0f7d-4d44-9e37-7d04f3046a38-kube-api-access-n8ssv\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.811725 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.811791 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.811831 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.811859 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-config\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.812052 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.813007 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.813888 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.814060 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.814434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.814468 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-config\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.814807 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b72c7491-0f7d-4d44-9e37-7d04f3046a38-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.874178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8ssv\" (UniqueName: \"kubernetes.io/projected/b72c7491-0f7d-4d44-9e37-7d04f3046a38-kube-api-access-n8ssv\") pod \"dnsmasq-dns-cb6ffcf87-5c9gt\" (UID: \"b72c7491-0f7d-4d44-9e37-7d04f3046a38\") " pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.882497 4747 generic.go:334] "Generic (PLEG): container finished" podID="df86854d-111c-4325-97f5-cac39132f37e" containerID="f1ba76e6c515bbe36d1b7944258202e6aa1b8956ca7842520ea1803fd6bc3d62" exitCode=0 Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.882817 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" event={"ID":"df86854d-111c-4325-97f5-cac39132f37e","Type":"ContainerDied","Data":"f1ba76e6c515bbe36d1b7944258202e6aa1b8956ca7842520ea1803fd6bc3d62"} Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.945444 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:54 crc kubenswrapper[4747]: I1202 17:07:54.953829 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.015422 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-swift-storage-0\") pod \"df86854d-111c-4325-97f5-cac39132f37e\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.015838 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-sb\") pod \"df86854d-111c-4325-97f5-cac39132f37e\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.015943 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-config\") pod \"df86854d-111c-4325-97f5-cac39132f37e\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.015995 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-nb\") pod \"df86854d-111c-4325-97f5-cac39132f37e\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.016069 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzzj8\" (UniqueName: \"kubernetes.io/projected/df86854d-111c-4325-97f5-cac39132f37e-kube-api-access-lzzj8\") pod \"df86854d-111c-4325-97f5-cac39132f37e\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.016184 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-svc\") pod \"df86854d-111c-4325-97f5-cac39132f37e\" (UID: \"df86854d-111c-4325-97f5-cac39132f37e\") " Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.025723 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df86854d-111c-4325-97f5-cac39132f37e-kube-api-access-lzzj8" (OuterVolumeSpecName: "kube-api-access-lzzj8") pod "df86854d-111c-4325-97f5-cac39132f37e" (UID: "df86854d-111c-4325-97f5-cac39132f37e"). InnerVolumeSpecName "kube-api-access-lzzj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.077896 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-config" (OuterVolumeSpecName: "config") pod "df86854d-111c-4325-97f5-cac39132f37e" (UID: "df86854d-111c-4325-97f5-cac39132f37e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.079020 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "df86854d-111c-4325-97f5-cac39132f37e" (UID: "df86854d-111c-4325-97f5-cac39132f37e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.102260 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "df86854d-111c-4325-97f5-cac39132f37e" (UID: "df86854d-111c-4325-97f5-cac39132f37e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.104668 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "df86854d-111c-4325-97f5-cac39132f37e" (UID: "df86854d-111c-4325-97f5-cac39132f37e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.119720 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.120110 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.120156 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.120167 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzzj8\" (UniqueName: \"kubernetes.io/projected/df86854d-111c-4325-97f5-cac39132f37e-kube-api-access-lzzj8\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.120178 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.124447 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "df86854d-111c-4325-97f5-cac39132f37e" (UID: "df86854d-111c-4325-97f5-cac39132f37e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.222713 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df86854d-111c-4325-97f5-cac39132f37e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.521714 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-5c9gt"] Dec 02 17:07:55 crc kubenswrapper[4747]: W1202 17:07:55.529554 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb72c7491_0f7d_4d44_9e37_7d04f3046a38.slice/crio-7520f1f6faf81a1350d79445182a734c198e30421260e7d8f69f4ac6a8added8 WatchSource:0}: Error finding container 7520f1f6faf81a1350d79445182a734c198e30421260e7d8f69f4ac6a8added8: Status 404 returned error can't find the container with id 7520f1f6faf81a1350d79445182a734c198e30421260e7d8f69f4ac6a8added8 Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.896673 4747 generic.go:334] "Generic (PLEG): container finished" podID="b72c7491-0f7d-4d44-9e37-7d04f3046a38" containerID="6ea0f0b71c7bfaa8343a0fbd22ff9a6b97c6d79cb5a49fa3974212f2e8403b5a" exitCode=0 Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.896800 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" event={"ID":"b72c7491-0f7d-4d44-9e37-7d04f3046a38","Type":"ContainerDied","Data":"6ea0f0b71c7bfaa8343a0fbd22ff9a6b97c6d79cb5a49fa3974212f2e8403b5a"} Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.897322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" event={"ID":"b72c7491-0f7d-4d44-9e37-7d04f3046a38","Type":"ContainerStarted","Data":"7520f1f6faf81a1350d79445182a734c198e30421260e7d8f69f4ac6a8added8"} Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.901451 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" event={"ID":"df86854d-111c-4325-97f5-cac39132f37e","Type":"ContainerDied","Data":"119a03b4df4c4b7cb5d2ffba740df86d22add556e616f3ffba2f5097f9d9b540"} Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.901515 4747 scope.go:117] "RemoveContainer" containerID="f1ba76e6c515bbe36d1b7944258202e6aa1b8956ca7842520ea1803fd6bc3d62" Dec 02 17:07:55 crc kubenswrapper[4747]: I1202 17:07:55.901519 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-vnnlk" Dec 02 17:07:56 crc kubenswrapper[4747]: I1202 17:07:56.040202 4747 scope.go:117] "RemoveContainer" containerID="65f8887f816711826612bb51e2c00a42740b05742a34f625221524d3906e4a69" Dec 02 17:07:56 crc kubenswrapper[4747]: I1202 17:07:56.040327 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-vnnlk"] Dec 02 17:07:56 crc kubenswrapper[4747]: I1202 17:07:56.051351 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-vnnlk"] Dec 02 17:07:56 crc kubenswrapper[4747]: I1202 17:07:56.914303 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" event={"ID":"b72c7491-0f7d-4d44-9e37-7d04f3046a38","Type":"ContainerStarted","Data":"1aa6f49420577b5e6c0aad42c249fbc5b917d62aeb023838bf6714efa9be557a"} Dec 02 17:07:56 crc kubenswrapper[4747]: I1202 17:07:56.914670 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:07:56 crc kubenswrapper[4747]: I1202 17:07:56.932176 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" podStartSLOduration=2.932155824 podStartE2EDuration="2.932155824s" podCreationTimestamp="2025-12-02 17:07:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:07:56.930087475 +0000 UTC m=+1507.456976234" watchObservedRunningTime="2025-12-02 17:07:56.932155824 +0000 UTC m=+1507.459044583" Dec 02 17:07:57 crc kubenswrapper[4747]: I1202 17:07:57.772818 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df86854d-111c-4325-97f5-cac39132f37e" path="/var/lib/kubelet/pods/df86854d-111c-4325-97f5-cac39132f37e/volumes" Dec 02 17:08:04 crc kubenswrapper[4747]: I1202 17:08:04.948259 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb6ffcf87-5c9gt" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.049175 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-cqn8b"] Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.049561 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" podUID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerName="dnsmasq-dns" containerID="cri-o://d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea" gracePeriod=10 Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.230951 4747 scope.go:117] "RemoveContainer" containerID="7267e15b3d012b4499dfc73930d556fa9134fa3b5d4966724b95da81951561e9" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.590196 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.746587 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-nb\") pod \"ee1079cc-b2a9-475d-a348-2d8b7d306002\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.746891 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9k66\" (UniqueName: \"kubernetes.io/projected/ee1079cc-b2a9-475d-a348-2d8b7d306002-kube-api-access-h9k66\") pod \"ee1079cc-b2a9-475d-a348-2d8b7d306002\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.747018 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-openstack-edpm-ipam\") pod \"ee1079cc-b2a9-475d-a348-2d8b7d306002\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.747620 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-svc\") pod \"ee1079cc-b2a9-475d-a348-2d8b7d306002\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.747697 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-config\") pod \"ee1079cc-b2a9-475d-a348-2d8b7d306002\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.747850 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-swift-storage-0\") pod \"ee1079cc-b2a9-475d-a348-2d8b7d306002\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.747995 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-sb\") pod \"ee1079cc-b2a9-475d-a348-2d8b7d306002\" (UID: \"ee1079cc-b2a9-475d-a348-2d8b7d306002\") " Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.754381 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee1079cc-b2a9-475d-a348-2d8b7d306002-kube-api-access-h9k66" (OuterVolumeSpecName: "kube-api-access-h9k66") pod "ee1079cc-b2a9-475d-a348-2d8b7d306002" (UID: "ee1079cc-b2a9-475d-a348-2d8b7d306002"). InnerVolumeSpecName "kube-api-access-h9k66". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.800833 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ee1079cc-b2a9-475d-a348-2d8b7d306002" (UID: "ee1079cc-b2a9-475d-a348-2d8b7d306002"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.802849 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-config" (OuterVolumeSpecName: "config") pod "ee1079cc-b2a9-475d-a348-2d8b7d306002" (UID: "ee1079cc-b2a9-475d-a348-2d8b7d306002"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.806499 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ee1079cc-b2a9-475d-a348-2d8b7d306002" (UID: "ee1079cc-b2a9-475d-a348-2d8b7d306002"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.808993 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ee1079cc-b2a9-475d-a348-2d8b7d306002" (UID: "ee1079cc-b2a9-475d-a348-2d8b7d306002"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.812649 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "ee1079cc-b2a9-475d-a348-2d8b7d306002" (UID: "ee1079cc-b2a9-475d-a348-2d8b7d306002"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.827263 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ee1079cc-b2a9-475d-a348-2d8b7d306002" (UID: "ee1079cc-b2a9-475d-a348-2d8b7d306002"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.850653 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9k66\" (UniqueName: \"kubernetes.io/projected/ee1079cc-b2a9-475d-a348-2d8b7d306002-kube-api-access-h9k66\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.850711 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.850725 4747 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.850740 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-config\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.850754 4747 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.850767 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:05 crc kubenswrapper[4747]: I1202 17:08:05.850778 4747 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ee1079cc-b2a9-475d-a348-2d8b7d306002-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.049645 4747 generic.go:334] "Generic (PLEG): container finished" podID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerID="d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea" exitCode=0 Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.049729 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.049753 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" event={"ID":"ee1079cc-b2a9-475d-a348-2d8b7d306002","Type":"ContainerDied","Data":"d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea"} Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.050279 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-cqn8b" event={"ID":"ee1079cc-b2a9-475d-a348-2d8b7d306002","Type":"ContainerDied","Data":"984d503c0f7f3d8412176a71106c5e656bcc9cdeba0c9fd18fa908eda4dbf84a"} Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.050316 4747 scope.go:117] "RemoveContainer" containerID="d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea" Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.071963 4747 scope.go:117] "RemoveContainer" containerID="ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d" Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.087431 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-cqn8b"] Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.096593 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-cqn8b"] Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.109996 4747 scope.go:117] "RemoveContainer" containerID="d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea" Dec 02 17:08:06 crc kubenswrapper[4747]: E1202 17:08:06.110759 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea\": container with ID starting with d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea not found: ID does not exist" containerID="d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea" Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.110870 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea"} err="failed to get container status \"d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea\": rpc error: code = NotFound desc = could not find container \"d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea\": container with ID starting with d9b6a3e6b4cbdb17d1f5526730ae26eaf426155b07c5931593ed9dc6ce233bea not found: ID does not exist" Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.111042 4747 scope.go:117] "RemoveContainer" containerID="ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d" Dec 02 17:08:06 crc kubenswrapper[4747]: E1202 17:08:06.111607 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d\": container with ID starting with ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d not found: ID does not exist" containerID="ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d" Dec 02 17:08:06 crc kubenswrapper[4747]: I1202 17:08:06.111680 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d"} err="failed to get container status 
\"ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d\": rpc error: code = NotFound desc = could not find container \"ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d\": container with ID starting with ef3230557254879d92ea7f92191bf98c8c41fd047cb1737291ec837fb87cc86d not found: ID does not exist" Dec 02 17:08:07 crc kubenswrapper[4747]: I1202 17:08:07.772129 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee1079cc-b2a9-475d-a348-2d8b7d306002" path="/var/lib/kubelet/pods/ee1079cc-b2a9-475d-a348-2d8b7d306002/volumes" Dec 02 17:08:14 crc kubenswrapper[4747]: I1202 17:08:14.148793 4747 generic.go:334] "Generic (PLEG): container finished" podID="29f33a45-0d0b-4654-879e-94098ab4b4c5" containerID="e8b88cbf8ef0d4ec6c489f4a1f0b24110af8f3f8575e04ac53ce33f51e3cf9b1" exitCode=0 Dec 02 17:08:14 crc kubenswrapper[4747]: I1202 17:08:14.148949 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"29f33a45-0d0b-4654-879e-94098ab4b4c5","Type":"ContainerDied","Data":"e8b88cbf8ef0d4ec6c489f4a1f0b24110af8f3f8575e04ac53ce33f51e3cf9b1"} Dec 02 17:08:15 crc kubenswrapper[4747]: I1202 17:08:15.162371 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"29f33a45-0d0b-4654-879e-94098ab4b4c5","Type":"ContainerStarted","Data":"8ce7335d66f9c394071e2bde14794db2672e401c03226103dd7d7967fcf98563"} Dec 02 17:08:15 crc kubenswrapper[4747]: I1202 17:08:15.162838 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 17:08:15 crc kubenswrapper[4747]: I1202 17:08:15.190150 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.190124787 podStartE2EDuration="37.190124787s" podCreationTimestamp="2025-12-02 17:07:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:08:15.183524389 +0000 UTC m=+1525.710413138" watchObservedRunningTime="2025-12-02 17:08:15.190124787 +0000 UTC m=+1525.717013536" Dec 02 17:08:16 crc kubenswrapper[4747]: I1202 17:08:16.192531 4747 generic.go:334] "Generic (PLEG): container finished" podID="11c3b905-4c74-439b-b032-a3234c0bf501" containerID="78aa5c111264ec1d5f25aa240f599b20702119116c17f7bff77203aa6ba5d19a" exitCode=0 Dec 02 17:08:16 crc kubenswrapper[4747]: I1202 17:08:16.192672 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"11c3b905-4c74-439b-b032-a3234c0bf501","Type":"ContainerDied","Data":"78aa5c111264ec1d5f25aa240f599b20702119116c17f7bff77203aa6ba5d19a"} Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.206323 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"11c3b905-4c74-439b-b032-a3234c0bf501","Type":"ContainerStarted","Data":"52aea366ed04c94709af89e069c5c8384a79632d523224643df753505fce85cb"} Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.208180 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.240416 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.240382793 podStartE2EDuration="37.240382793s" podCreationTimestamp="2025-12-02 17:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:08:17.234036342 +0000 UTC m=+1527.760925101" watchObservedRunningTime="2025-12-02 17:08:17.240382793 +0000 UTC m=+1527.767271572" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.294247 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92"] Dec 02 17:08:17 crc kubenswrapper[4747]: E1202 17:08:17.294838 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerName="dnsmasq-dns" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.294863 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerName="dnsmasq-dns" Dec 02 17:08:17 crc kubenswrapper[4747]: E1202 17:08:17.294883 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df86854d-111c-4325-97f5-cac39132f37e" containerName="init" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.294892 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="df86854d-111c-4325-97f5-cac39132f37e" containerName="init" Dec 02 17:08:17 crc kubenswrapper[4747]: E1202 17:08:17.294925 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerName="init" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.294934 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerName="init" Dec 02 17:08:17 crc kubenswrapper[4747]: E1202 17:08:17.294956 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df86854d-111c-4325-97f5-cac39132f37e" containerName="dnsmasq-dns" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.294962 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="df86854d-111c-4325-97f5-cac39132f37e" containerName="dnsmasq-dns" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.295187 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="df86854d-111c-4325-97f5-cac39132f37e" containerName="dnsmasq-dns" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.295215 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee1079cc-b2a9-475d-a348-2d8b7d306002" containerName="dnsmasq-dns" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.295953 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.297897 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.298334 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.298585 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.298801 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.311089 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92"] Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.388622 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.388713 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.388814 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.388845 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lh8z\" (UniqueName: \"kubernetes.io/projected/0fb63be9-f6ff-45ad-a564-6e43493ea683-kube-api-access-9lh8z\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.490512 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.490657 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.490691 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lh8z\" (UniqueName: \"kubernetes.io/projected/0fb63be9-f6ff-45ad-a564-6e43493ea683-kube-api-access-9lh8z\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.490785 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.495357 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.495894 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.497111 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.524417 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lh8z\" (UniqueName: \"kubernetes.io/projected/0fb63be9-f6ff-45ad-a564-6e43493ea683-kube-api-access-9lh8z\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:17 crc kubenswrapper[4747]: I1202 17:08:17.643875 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:18 crc kubenswrapper[4747]: I1202 17:08:18.262212 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92"] Dec 02 17:08:19 crc kubenswrapper[4747]: I1202 17:08:19.224059 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" event={"ID":"0fb63be9-f6ff-45ad-a564-6e43493ea683","Type":"ContainerStarted","Data":"4e691c47a44724fff44430d7ae6453874e3c9fe6ae28180433eba9ae7148308a"} Dec 02 17:08:29 crc kubenswrapper[4747]: I1202 17:08:29.129116 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 02 17:08:31 crc kubenswrapper[4747]: I1202 17:08:31.121096 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 02 17:08:35 crc kubenswrapper[4747]: I1202 17:08:35.412869 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" event={"ID":"0fb63be9-f6ff-45ad-a564-6e43493ea683","Type":"ContainerStarted","Data":"db7b5fe329b19fe6531aa0eb9212fcdb177bacb06f00e3d48b6f9870056274e9"} Dec 02 17:08:46 crc kubenswrapper[4747]: I1202 17:08:46.510333 4747 generic.go:334] "Generic (PLEG): container finished" podID="0fb63be9-f6ff-45ad-a564-6e43493ea683" containerID="db7b5fe329b19fe6531aa0eb9212fcdb177bacb06f00e3d48b6f9870056274e9" exitCode=0 Dec 02 17:08:46 crc kubenswrapper[4747]: I1202 17:08:46.510727 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" event={"ID":"0fb63be9-f6ff-45ad-a564-6e43493ea683","Type":"ContainerDied","Data":"db7b5fe329b19fe6531aa0eb9212fcdb177bacb06f00e3d48b6f9870056274e9"} Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.026255 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.141977 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-repo-setup-combined-ca-bundle\") pod \"0fb63be9-f6ff-45ad-a564-6e43493ea683\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.142079 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-inventory\") pod \"0fb63be9-f6ff-45ad-a564-6e43493ea683\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.142106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lh8z\" (UniqueName: \"kubernetes.io/projected/0fb63be9-f6ff-45ad-a564-6e43493ea683-kube-api-access-9lh8z\") pod \"0fb63be9-f6ff-45ad-a564-6e43493ea683\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.142225 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-ssh-key\") pod \"0fb63be9-f6ff-45ad-a564-6e43493ea683\" (UID: \"0fb63be9-f6ff-45ad-a564-6e43493ea683\") " Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.149394 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "0fb63be9-f6ff-45ad-a564-6e43493ea683" (UID: "0fb63be9-f6ff-45ad-a564-6e43493ea683"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.151779 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fb63be9-f6ff-45ad-a564-6e43493ea683-kube-api-access-9lh8z" (OuterVolumeSpecName: "kube-api-access-9lh8z") pod "0fb63be9-f6ff-45ad-a564-6e43493ea683" (UID: "0fb63be9-f6ff-45ad-a564-6e43493ea683"). InnerVolumeSpecName "kube-api-access-9lh8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.173167 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0fb63be9-f6ff-45ad-a564-6e43493ea683" (UID: "0fb63be9-f6ff-45ad-a564-6e43493ea683"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.183124 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-inventory" (OuterVolumeSpecName: "inventory") pod "0fb63be9-f6ff-45ad-a564-6e43493ea683" (UID: "0fb63be9-f6ff-45ad-a564-6e43493ea683"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.244372 4747 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.244412 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.244424 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lh8z\" (UniqueName: \"kubernetes.io/projected/0fb63be9-f6ff-45ad-a564-6e43493ea683-kube-api-access-9lh8z\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.244433 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fb63be9-f6ff-45ad-a564-6e43493ea683-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.533986 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" event={"ID":"0fb63be9-f6ff-45ad-a564-6e43493ea683","Type":"ContainerDied","Data":"4e691c47a44724fff44430d7ae6453874e3c9fe6ae28180433eba9ae7148308a"} Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.534360 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e691c47a44724fff44430d7ae6453874e3c9fe6ae28180433eba9ae7148308a" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.534289 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.631228 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m"] Dec 02 17:08:48 crc kubenswrapper[4747]: E1202 17:08:48.631668 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fb63be9-f6ff-45ad-a564-6e43493ea683" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.631690 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fb63be9-f6ff-45ad-a564-6e43493ea683" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.632021 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fb63be9-f6ff-45ad-a564-6e43493ea683" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.632772 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.635671 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.635671 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.637221 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.637454 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.644226 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m"] Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.755550 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.755739 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mzxx\" (UniqueName: \"kubernetes.io/projected/a0a25e12-2008-4db8-9de5-9656b34976e0-kube-api-access-5mzxx\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.755884 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.857896 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mzxx\" (UniqueName: \"kubernetes.io/projected/a0a25e12-2008-4db8-9de5-9656b34976e0-kube-api-access-5mzxx\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.858162 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.858210 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.862805 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.876046 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.878875 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mzxx\" (UniqueName: \"kubernetes.io/projected/a0a25e12-2008-4db8-9de5-9656b34976e0-kube-api-access-5mzxx\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5xr4m\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:48 crc kubenswrapper[4747]: I1202 17:08:48.953620 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:49 crc kubenswrapper[4747]: I1202 17:08:49.495993 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m"] Dec 02 17:08:49 crc kubenswrapper[4747]: W1202 17:08:49.499441 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0a25e12_2008_4db8_9de5_9656b34976e0.slice/crio-294c0a8b3e761b63cbfac2db78674a2fb88772dca43c93a2041feaadf14a30a0 WatchSource:0}: Error finding container 294c0a8b3e761b63cbfac2db78674a2fb88772dca43c93a2041feaadf14a30a0: Status 404 returned error can't find the container with id 294c0a8b3e761b63cbfac2db78674a2fb88772dca43c93a2041feaadf14a30a0 Dec 02 17:08:49 crc kubenswrapper[4747]: I1202 17:08:49.545802 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" event={"ID":"a0a25e12-2008-4db8-9de5-9656b34976e0","Type":"ContainerStarted","Data":"294c0a8b3e761b63cbfac2db78674a2fb88772dca43c93a2041feaadf14a30a0"} Dec 02 17:08:50 crc kubenswrapper[4747]: I1202 17:08:50.559878 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" event={"ID":"a0a25e12-2008-4db8-9de5-9656b34976e0","Type":"ContainerStarted","Data":"e7f83897b401ab5c5a693a3f6eab5329675a41a73b508bad2e6545fa1efe89cd"} Dec 02 17:08:50 crc kubenswrapper[4747]: I1202 17:08:50.595459 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" podStartSLOduration=2.411241398 podStartE2EDuration="2.595438435s" podCreationTimestamp="2025-12-02 17:08:48 +0000 UTC" firstStartedPulling="2025-12-02 17:08:49.501536465 +0000 UTC m=+1560.028425214" lastFinishedPulling="2025-12-02 17:08:49.685733502 +0000 UTC m=+1560.212622251" observedRunningTime="2025-12-02 17:08:50.590896636 +0000 UTC m=+1561.117785385" watchObservedRunningTime="2025-12-02 17:08:50.595438435 +0000 UTC 
m=+1561.122327174" Dec 02 17:08:52 crc kubenswrapper[4747]: I1202 17:08:52.580541 4747 generic.go:334] "Generic (PLEG): container finished" podID="a0a25e12-2008-4db8-9de5-9656b34976e0" containerID="e7f83897b401ab5c5a693a3f6eab5329675a41a73b508bad2e6545fa1efe89cd" exitCode=0 Dec 02 17:08:52 crc kubenswrapper[4747]: I1202 17:08:52.580600 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" event={"ID":"a0a25e12-2008-4db8-9de5-9656b34976e0","Type":"ContainerDied","Data":"e7f83897b401ab5c5a693a3f6eab5329675a41a73b508bad2e6545fa1efe89cd"} Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.032648 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.168811 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-inventory\") pod \"a0a25e12-2008-4db8-9de5-9656b34976e0\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.169277 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-ssh-key\") pod \"a0a25e12-2008-4db8-9de5-9656b34976e0\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.169318 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mzxx\" (UniqueName: \"kubernetes.io/projected/a0a25e12-2008-4db8-9de5-9656b34976e0-kube-api-access-5mzxx\") pod \"a0a25e12-2008-4db8-9de5-9656b34976e0\" (UID: \"a0a25e12-2008-4db8-9de5-9656b34976e0\") " Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.174466 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a25e12-2008-4db8-9de5-9656b34976e0-kube-api-access-5mzxx" (OuterVolumeSpecName: "kube-api-access-5mzxx") pod "a0a25e12-2008-4db8-9de5-9656b34976e0" (UID: "a0a25e12-2008-4db8-9de5-9656b34976e0"). InnerVolumeSpecName "kube-api-access-5mzxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.195347 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-inventory" (OuterVolumeSpecName: "inventory") pod "a0a25e12-2008-4db8-9de5-9656b34976e0" (UID: "a0a25e12-2008-4db8-9de5-9656b34976e0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.197746 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a0a25e12-2008-4db8-9de5-9656b34976e0" (UID: "a0a25e12-2008-4db8-9de5-9656b34976e0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.273007 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.274656 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a25e12-2008-4db8-9de5-9656b34976e0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.274763 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mzxx\" (UniqueName: \"kubernetes.io/projected/a0a25e12-2008-4db8-9de5-9656b34976e0-kube-api-access-5mzxx\") on node \"crc\" DevicePath \"\"" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.504802 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6khkx"] Dec 02 17:08:54 crc kubenswrapper[4747]: E1202 17:08:54.505482 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a25e12-2008-4db8-9de5-9656b34976e0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.505510 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a25e12-2008-4db8-9de5-9656b34976e0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.505743 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a25e12-2008-4db8-9de5-9656b34976e0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.507406 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.517190 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6khkx"] Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.581760 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-utilities\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.581850 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ng9f\" (UniqueName: \"kubernetes.io/projected/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-kube-api-access-7ng9f\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.582070 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-catalog-content\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.601447 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" event={"ID":"a0a25e12-2008-4db8-9de5-9656b34976e0","Type":"ContainerDied","Data":"294c0a8b3e761b63cbfac2db78674a2fb88772dca43c93a2041feaadf14a30a0"} Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.601490 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="294c0a8b3e761b63cbfac2db78674a2fb88772dca43c93a2041feaadf14a30a0" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.601526 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5xr4m" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.673632 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd"] Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.675263 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.677730 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.685591 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.685847 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.686836 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd"] Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.687329 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-utilities\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.687503 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ng9f\" (UniqueName: \"kubernetes.io/projected/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-kube-api-access-7ng9f\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.687627 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-catalog-content\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.688120 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-utilities\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.688294 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-catalog-content\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.688502 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.708645 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ng9f\" (UniqueName: \"kubernetes.io/projected/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-kube-api-access-7ng9f\") pod \"certified-operators-6khkx\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.790276 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.790783 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.791115 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.791312 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbg9p\" (UniqueName: \"kubernetes.io/projected/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-kube-api-access-qbg9p\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.834818 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.892935 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.893258 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbg9p\" (UniqueName: \"kubernetes.io/projected/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-kube-api-access-qbg9p\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.893320 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.893356 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.898854 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.898867 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.899223 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:54 crc kubenswrapper[4747]: I1202 17:08:54.914745 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbg9p\" (UniqueName: \"kubernetes.io/projected/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-kube-api-access-qbg9p\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:55 crc kubenswrapper[4747]: I1202 17:08:55.060769 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:08:55 crc kubenswrapper[4747]: I1202 17:08:55.361657 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6khkx"] Dec 02 17:08:55 crc kubenswrapper[4747]: I1202 17:08:55.614057 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6khkx" event={"ID":"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0","Type":"ContainerStarted","Data":"38f1e4b144f53d9c9386ed2af9a1292410fbdb81ee6632a4523368b5baf3e91f"} Dec 02 17:08:55 crc kubenswrapper[4747]: I1202 17:08:55.634461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd"] Dec 02 17:08:56 crc kubenswrapper[4747]: I1202 17:08:56.624193 4747 generic.go:334] "Generic (PLEG): container finished" podID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerID="6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f" exitCode=0 Dec 02 17:08:56 crc kubenswrapper[4747]: I1202 17:08:56.624244 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6khkx" event={"ID":"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0","Type":"ContainerDied","Data":"6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f"} Dec 02 17:08:56 crc kubenswrapper[4747]: I1202 17:08:56.628534 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" event={"ID":"0af08a10-c636-49d7-8b9f-ae2bdd2e6371","Type":"ContainerStarted","Data":"7d8a3d48c76d7ecb0074c162c3e226c10d54b2cf672c5393f65c0d2e7d88bff0"} Dec 02 17:08:56 crc kubenswrapper[4747]: I1202 17:08:56.628580 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" event={"ID":"0af08a10-c636-49d7-8b9f-ae2bdd2e6371","Type":"ContainerStarted","Data":"b5515d9ad1cba61e25609c5311f9aeca8bf7fa8f83d3820daef6b04e5d883488"} Dec 02 17:08:56 crc kubenswrapper[4747]: I1202 17:08:56.674328 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" podStartSLOduration=2.471541974 podStartE2EDuration="2.67430546s" podCreationTimestamp="2025-12-02 17:08:54 +0000 UTC" firstStartedPulling="2025-12-02 17:08:55.64292248 +0000 UTC m=+1566.169811229" lastFinishedPulling="2025-12-02 17:08:55.845685966 +0000 UTC m=+1566.372574715" observedRunningTime="2025-12-02 17:08:56.664436169 +0000 UTC m=+1567.191324918" watchObservedRunningTime="2025-12-02 17:08:56.67430546 +0000 UTC m=+1567.201194209" Dec 02 17:08:57 crc kubenswrapper[4747]: I1202 17:08:57.669845 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6khkx" event={"ID":"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0","Type":"ContainerStarted","Data":"2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54"} Dec 02 17:08:58 crc kubenswrapper[4747]: I1202 17:08:58.680700 4747 generic.go:334] "Generic (PLEG): container finished" podID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerID="2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54" exitCode=0 Dec 02 17:08:58 crc kubenswrapper[4747]: I1202 17:08:58.680811 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6khkx" 
event={"ID":"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0","Type":"ContainerDied","Data":"2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54"} Dec 02 17:09:00 crc kubenswrapper[4747]: I1202 17:09:00.705309 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6khkx" event={"ID":"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0","Type":"ContainerStarted","Data":"65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae"} Dec 02 17:09:00 crc kubenswrapper[4747]: I1202 17:09:00.727762 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6khkx" podStartSLOduration=3.411772527 podStartE2EDuration="6.727741237s" podCreationTimestamp="2025-12-02 17:08:54 +0000 UTC" firstStartedPulling="2025-12-02 17:08:56.627372513 +0000 UTC m=+1567.154261302" lastFinishedPulling="2025-12-02 17:08:59.943341253 +0000 UTC m=+1570.470230012" observedRunningTime="2025-12-02 17:09:00.725917145 +0000 UTC m=+1571.252805894" watchObservedRunningTime="2025-12-02 17:09:00.727741237 +0000 UTC m=+1571.254629986" Dec 02 17:09:04 crc kubenswrapper[4747]: I1202 17:09:04.835217 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:09:04 crc kubenswrapper[4747]: I1202 17:09:04.835780 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:09:04 crc kubenswrapper[4747]: I1202 17:09:04.880577 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:09:05 crc kubenswrapper[4747]: I1202 17:09:05.357467 4747 scope.go:117] "RemoveContainer" containerID="925fd0a24be60f6c05b52e5e85786f38ce9e346d6d05cc7f3f7a87f1bb5f50ec" Dec 02 17:09:05 crc kubenswrapper[4747]: I1202 17:09:05.384112 4747 scope.go:117] "RemoveContainer" containerID="a84d174276f460143d5b1c3ca544fa3eac2ff5e300c56e29fa38cd0957f89aba" Dec 02 17:09:05 crc kubenswrapper[4747]: I1202 17:09:05.425064 4747 scope.go:117] "RemoveContainer" containerID="f62ef644898daa8e6c778da393cb7a04f71b8b12718b54f1fe528d9be108ccf1" Dec 02 17:09:05 crc kubenswrapper[4747]: I1202 17:09:05.453814 4747 scope.go:117] "RemoveContainer" containerID="44856576264b0f4fd85137ff919e55d55c15a42436dc529a3fc8e1261a09442e" Dec 02 17:09:05 crc kubenswrapper[4747]: I1202 17:09:05.496961 4747 scope.go:117] "RemoveContainer" containerID="c34cdfa4c2ef66123fb72191c37c13e6ca03627a6b78dc273fef5849dfd87324" Dec 02 17:09:05 crc kubenswrapper[4747]: I1202 17:09:05.798028 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:09:05 crc kubenswrapper[4747]: I1202 17:09:05.859267 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6khkx"] Dec 02 17:09:07 crc kubenswrapper[4747]: I1202 17:09:07.765969 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6khkx" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="registry-server" containerID="cri-o://65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae" gracePeriod=2 Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.245604 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.366137 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ng9f\" (UniqueName: \"kubernetes.io/projected/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-kube-api-access-7ng9f\") pod \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.366245 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-utilities\") pod \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.366265 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-catalog-content\") pod \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\" (UID: \"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0\") " Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.367152 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-utilities" (OuterVolumeSpecName: "utilities") pod "5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" (UID: "5368b0cf-4205-4cbd-a1f5-13c3b879f6f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.372259 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-kube-api-access-7ng9f" (OuterVolumeSpecName: "kube-api-access-7ng9f") pod "5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" (UID: "5368b0cf-4205-4cbd-a1f5-13c3b879f6f0"). InnerVolumeSpecName "kube-api-access-7ng9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.412159 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" (UID: "5368b0cf-4205-4cbd-a1f5-13c3b879f6f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.469466 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.469493 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.469503 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ng9f\" (UniqueName: \"kubernetes.io/projected/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0-kube-api-access-7ng9f\") on node \"crc\" DevicePath \"\"" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.778086 4747 generic.go:334] "Generic (PLEG): container finished" podID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerID="65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae" exitCode=0 Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.778150 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6khkx" event={"ID":"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0","Type":"ContainerDied","Data":"65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae"} Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.778180 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6khkx" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.778205 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6khkx" event={"ID":"5368b0cf-4205-4cbd-a1f5-13c3b879f6f0","Type":"ContainerDied","Data":"38f1e4b144f53d9c9386ed2af9a1292410fbdb81ee6632a4523368b5baf3e91f"} Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.778235 4747 scope.go:117] "RemoveContainer" containerID="65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.817299 4747 scope.go:117] "RemoveContainer" containerID="2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.829198 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6khkx"] Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.842338 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6khkx"] Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.844624 4747 scope.go:117] "RemoveContainer" containerID="6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.891469 4747 scope.go:117] "RemoveContainer" containerID="65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae" Dec 02 17:09:08 crc kubenswrapper[4747]: E1202 17:09:08.892402 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae\": container with ID starting with 65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae not found: ID does not exist" containerID="65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.892454 
4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae"} err="failed to get container status \"65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae\": rpc error: code = NotFound desc = could not find container \"65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae\": container with ID starting with 65863fb698f15362f0a0271ed12ec0c7e512c003583e51b7776d38bf6ec969ae not found: ID does not exist" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.892486 4747 scope.go:117] "RemoveContainer" containerID="2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54" Dec 02 17:09:08 crc kubenswrapper[4747]: E1202 17:09:08.893034 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54\": container with ID starting with 2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54 not found: ID does not exist" containerID="2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.893058 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54"} err="failed to get container status \"2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54\": rpc error: code = NotFound desc = could not find container \"2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54\": container with ID starting with 2790c37a00e15606a03e208f266acc3dbaf60414a5a5ac1f9a69e2dee47bcf54 not found: ID does not exist" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.893075 4747 scope.go:117] "RemoveContainer" containerID="6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f" Dec 02 17:09:08 crc kubenswrapper[4747]: E1202 17:09:08.893595 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f\": container with ID starting with 6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f not found: ID does not exist" containerID="6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f" Dec 02 17:09:08 crc kubenswrapper[4747]: I1202 17:09:08.893657 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f"} err="failed to get container status \"6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f\": rpc error: code = NotFound desc = could not find container \"6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f\": container with ID starting with 6302d5f0f35ef78d590d5726d99768a00a04f76d16c64d12560ddb240dba484f not found: ID does not exist" Dec 02 17:09:09 crc kubenswrapper[4747]: I1202 17:09:09.773397 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" path="/var/lib/kubelet/pods/5368b0cf-4205-4cbd-a1f5-13c3b879f6f0/volumes" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.132361 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bgc7n"] Dec 02 17:09:39 crc kubenswrapper[4747]: E1202 17:09:39.134085 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="extract-utilities" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.134108 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="extract-utilities" Dec 02 17:09:39 crc kubenswrapper[4747]: E1202 17:09:39.134156 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="extract-content" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.134178 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="extract-content" Dec 02 17:09:39 crc kubenswrapper[4747]: E1202 17:09:39.134197 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="registry-server" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.134205 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="registry-server" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.134535 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5368b0cf-4205-4cbd-a1f5-13c3b879f6f0" containerName="registry-server" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.136497 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.145442 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bgc7n"] Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.277944 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-utilities\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.278034 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-catalog-content\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.278234 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48qs7\" (UniqueName: \"kubernetes.io/projected/f993b646-580f-4a5c-87bd-de6edc305201-kube-api-access-48qs7\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.380212 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-utilities\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.380590 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-catalog-content\") pod 
\"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.380769 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48qs7\" (UniqueName: \"kubernetes.io/projected/f993b646-580f-4a5c-87bd-de6edc305201-kube-api-access-48qs7\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.381563 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-utilities\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.382009 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-catalog-content\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.408243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48qs7\" (UniqueName: \"kubernetes.io/projected/f993b646-580f-4a5c-87bd-de6edc305201-kube-api-access-48qs7\") pod \"community-operators-bgc7n\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:39 crc kubenswrapper[4747]: I1202 17:09:39.461478 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:40 crc kubenswrapper[4747]: I1202 17:09:40.055964 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bgc7n"] Dec 02 17:09:40 crc kubenswrapper[4747]: I1202 17:09:40.078322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgc7n" event={"ID":"f993b646-580f-4a5c-87bd-de6edc305201","Type":"ContainerStarted","Data":"aaffddfd7d071e20584487c26f498f8d928c9963fb25521b0e851a475f861de2"} Dec 02 17:09:41 crc kubenswrapper[4747]: I1202 17:09:41.088788 4747 generic.go:334] "Generic (PLEG): container finished" podID="f993b646-580f-4a5c-87bd-de6edc305201" containerID="7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8" exitCode=0 Dec 02 17:09:41 crc kubenswrapper[4747]: I1202 17:09:41.089070 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgc7n" event={"ID":"f993b646-580f-4a5c-87bd-de6edc305201","Type":"ContainerDied","Data":"7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8"} Dec 02 17:09:43 crc kubenswrapper[4747]: I1202 17:09:43.106249 4747 generic.go:334] "Generic (PLEG): container finished" podID="f993b646-580f-4a5c-87bd-de6edc305201" containerID="89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4" exitCode=0 Dec 02 17:09:43 crc kubenswrapper[4747]: I1202 17:09:43.106871 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgc7n" event={"ID":"f993b646-580f-4a5c-87bd-de6edc305201","Type":"ContainerDied","Data":"89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4"} Dec 02 17:09:44 crc kubenswrapper[4747]: I1202 17:09:44.117448 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgc7n" event={"ID":"f993b646-580f-4a5c-87bd-de6edc305201","Type":"ContainerStarted","Data":"3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11"} Dec 02 17:09:44 crc kubenswrapper[4747]: I1202 17:09:44.140851 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bgc7n" podStartSLOduration=2.709544489 podStartE2EDuration="5.140829087s" podCreationTimestamp="2025-12-02 17:09:39 +0000 UTC" firstStartedPulling="2025-12-02 17:09:41.090957147 +0000 UTC m=+1611.617845896" lastFinishedPulling="2025-12-02 17:09:43.522241745 +0000 UTC m=+1614.049130494" observedRunningTime="2025-12-02 17:09:44.136460212 +0000 UTC m=+1614.663348961" watchObservedRunningTime="2025-12-02 17:09:44.140829087 +0000 UTC m=+1614.667717836" Dec 02 17:09:49 crc kubenswrapper[4747]: I1202 17:09:49.461744 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:49 crc kubenswrapper[4747]: I1202 17:09:49.462273 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:49 crc kubenswrapper[4747]: I1202 17:09:49.515317 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:50 crc kubenswrapper[4747]: I1202 17:09:50.240893 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:50 crc kubenswrapper[4747]: I1202 17:09:50.291322 4747 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/community-operators-bgc7n"] Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.208036 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bgc7n" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="registry-server" containerID="cri-o://3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11" gracePeriod=2 Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.740572 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.838963 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-catalog-content\") pod \"f993b646-580f-4a5c-87bd-de6edc305201\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.839112 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48qs7\" (UniqueName: \"kubernetes.io/projected/f993b646-580f-4a5c-87bd-de6edc305201-kube-api-access-48qs7\") pod \"f993b646-580f-4a5c-87bd-de6edc305201\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.839243 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-utilities\") pod \"f993b646-580f-4a5c-87bd-de6edc305201\" (UID: \"f993b646-580f-4a5c-87bd-de6edc305201\") " Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.841062 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-utilities" (OuterVolumeSpecName: "utilities") pod "f993b646-580f-4a5c-87bd-de6edc305201" (UID: "f993b646-580f-4a5c-87bd-de6edc305201"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.848216 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f993b646-580f-4a5c-87bd-de6edc305201-kube-api-access-48qs7" (OuterVolumeSpecName: "kube-api-access-48qs7") pod "f993b646-580f-4a5c-87bd-de6edc305201" (UID: "f993b646-580f-4a5c-87bd-de6edc305201"). InnerVolumeSpecName "kube-api-access-48qs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.909813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f993b646-580f-4a5c-87bd-de6edc305201" (UID: "f993b646-580f-4a5c-87bd-de6edc305201"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.941882 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.941946 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48qs7\" (UniqueName: \"kubernetes.io/projected/f993b646-580f-4a5c-87bd-de6edc305201-kube-api-access-48qs7\") on node \"crc\" DevicePath \"\"" Dec 02 17:09:52 crc kubenswrapper[4747]: I1202 17:09:52.941956 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f993b646-580f-4a5c-87bd-de6edc305201-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.220202 4747 generic.go:334] "Generic (PLEG): container finished" podID="f993b646-580f-4a5c-87bd-de6edc305201" containerID="3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11" exitCode=0 Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.220257 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgc7n" event={"ID":"f993b646-580f-4a5c-87bd-de6edc305201","Type":"ContainerDied","Data":"3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11"} Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.220305 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bgc7n" event={"ID":"f993b646-580f-4a5c-87bd-de6edc305201","Type":"ContainerDied","Data":"aaffddfd7d071e20584487c26f498f8d928c9963fb25521b0e851a475f861de2"} Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.220335 4747 scope.go:117] "RemoveContainer" containerID="3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.220339 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bgc7n" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.261217 4747 scope.go:117] "RemoveContainer" containerID="89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.271200 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bgc7n"] Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.286351 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bgc7n"] Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.309683 4747 scope.go:117] "RemoveContainer" containerID="7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.339127 4747 scope.go:117] "RemoveContainer" containerID="3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11" Dec 02 17:09:53 crc kubenswrapper[4747]: E1202 17:09:53.339603 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11\": container with ID starting with 3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11 not found: ID does not exist" containerID="3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.339640 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11"} err="failed to get container status \"3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11\": rpc error: code = NotFound desc = could not find container \"3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11\": container with ID starting with 3b1534385d63e6084b19010eb8159bdde795a3d2b0db5ef3736beeb11e559c11 not found: ID does not exist" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.339665 4747 scope.go:117] "RemoveContainer" containerID="89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4" Dec 02 17:09:53 crc kubenswrapper[4747]: E1202 17:09:53.340495 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4\": container with ID starting with 89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4 not found: ID does not exist" containerID="89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.340519 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4"} err="failed to get container status \"89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4\": rpc error: code = NotFound desc = could not find container \"89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4\": container with ID starting with 89a3f1b3dd80c67b12349e6b51fafdec6141b375377d4d84b936a2c0147682b4 not found: ID does not exist" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.340534 4747 scope.go:117] "RemoveContainer" containerID="7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8" Dec 02 17:09:53 crc kubenswrapper[4747]: E1202 17:09:53.340868 4747 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8\": container with ID starting with 7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8 not found: ID does not exist" containerID="7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.340887 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8"} err="failed to get container status \"7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8\": rpc error: code = NotFound desc = could not find container \"7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8\": container with ID starting with 7e7a8a58d86273ba4d848ad9714c1a735d53bea1e0db6b06f9dd7db7d63678b8 not found: ID does not exist" Dec 02 17:09:53 crc kubenswrapper[4747]: I1202 17:09:53.774644 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f993b646-580f-4a5c-87bd-de6edc305201" path="/var/lib/kubelet/pods/f993b646-580f-4a5c-87bd-de6edc305201/volumes" Dec 02 17:10:01 crc kubenswrapper[4747]: I1202 17:10:01.795181 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:10:01 crc kubenswrapper[4747]: I1202 17:10:01.797041 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:10:05 crc kubenswrapper[4747]: I1202 17:10:05.681122 4747 scope.go:117] "RemoveContainer" containerID="626edf5c8b730915ff5dfe7a71e0ba211533522640a80e114136d58d0e81b81c" Dec 02 17:10:05 crc kubenswrapper[4747]: I1202 17:10:05.738135 4747 scope.go:117] "RemoveContainer" containerID="47afa13ede895aa4912bf6a9039ad232fe30afdb00f9a44fbdc816196a3adad2" Dec 02 17:10:05 crc kubenswrapper[4747]: I1202 17:10:05.877969 4747 scope.go:117] "RemoveContainer" containerID="1b04ae22b9c96e3ac7f64d88ed4695cc04c49db66143b17d7b7c24d4dba90371" Dec 02 17:10:05 crc kubenswrapper[4747]: I1202 17:10:05.897567 4747 scope.go:117] "RemoveContainer" containerID="d7b83f13de7d9765e6fa532e061fb77c43f20f6c68ce8ba5959efba12a03f098" Dec 02 17:10:05 crc kubenswrapper[4747]: I1202 17:10:05.918752 4747 scope.go:117] "RemoveContainer" containerID="4b3de528fe544733b59a27fd0ede0471796837f58ac4df3491c6fe0b7a5190eb" Dec 02 17:10:31 crc kubenswrapper[4747]: I1202 17:10:31.795057 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:10:31 crc kubenswrapper[4747]: I1202 17:10:31.795635 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Dec 02 17:11:01 crc kubenswrapper[4747]: I1202 17:11:01.795352 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:11:01 crc kubenswrapper[4747]: I1202 17:11:01.797341 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:11:01 crc kubenswrapper[4747]: I1202 17:11:01.797542 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:11:01 crc kubenswrapper[4747]: I1202 17:11:01.798772 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:11:01 crc kubenswrapper[4747]: I1202 17:11:01.799082 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" gracePeriod=600 Dec 02 17:11:01 crc kubenswrapper[4747]: E1202 17:11:01.928773 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:11:02 crc kubenswrapper[4747]: I1202 17:11:02.870669 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" exitCode=0 Dec 02 17:11:02 crc kubenswrapper[4747]: I1202 17:11:02.870724 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"} Dec 02 17:11:02 crc kubenswrapper[4747]: I1202 17:11:02.870794 4747 scope.go:117] "RemoveContainer" containerID="a9cd5048f2dad4e3a491049097b9d8740c67cd00c6933cd94235f0d88bac7953" Dec 02 17:11:02 crc kubenswrapper[4747]: I1202 17:11:02.871651 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:11:02 crc kubenswrapper[4747]: E1202 17:11:02.872130 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:11:14 crc kubenswrapper[4747]: I1202 17:11:14.761032 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:11:14 crc kubenswrapper[4747]: E1202 17:11:14.761697 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:11:29 crc kubenswrapper[4747]: I1202 17:11:29.768859 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:11:29 crc kubenswrapper[4747]: E1202 17:11:29.769838 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:11:44 crc kubenswrapper[4747]: I1202 17:11:44.761336 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:11:44 crc kubenswrapper[4747]: E1202 17:11:44.761976 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:11:58 crc kubenswrapper[4747]: I1202 17:11:58.760825 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:11:58 crc kubenswrapper[4747]: E1202 17:11:58.761551 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:12:02 crc kubenswrapper[4747]: I1202 17:12:02.065203 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-ltp7l"] Dec 02 17:12:02 crc kubenswrapper[4747]: I1202 17:12:02.079608 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-5jbl9"] Dec 02 17:12:02 crc kubenswrapper[4747]: I1202 17:12:02.091229 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-w5brp"] Dec 02 17:12:02 crc kubenswrapper[4747]: I1202 17:12:02.101426 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-ltp7l"] Dec 
02 17:12:02 crc kubenswrapper[4747]: I1202 17:12:02.113454 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-5jbl9"] Dec 02 17:12:02 crc kubenswrapper[4747]: I1202 17:12:02.124211 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-w5brp"] Dec 02 17:12:03 crc kubenswrapper[4747]: I1202 17:12:03.774519 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98c89af3-bb13-47c8-b976-01425ab50970" path="/var/lib/kubelet/pods/98c89af3-bb13-47c8-b976-01425ab50970/volumes" Dec 02 17:12:03 crc kubenswrapper[4747]: I1202 17:12:03.775246 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8a0631b-2dd4-49ec-895d-2423641e28f7" path="/var/lib/kubelet/pods/b8a0631b-2dd4-49ec-895d-2423641e28f7/volumes" Dec 02 17:12:03 crc kubenswrapper[4747]: I1202 17:12:03.775955 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b95920cc-6bf6-4182-b9c2-514a5348e275" path="/var/lib/kubelet/pods/b95920cc-6bf6-4182-b9c2-514a5348e275/volumes" Dec 02 17:12:06 crc kubenswrapper[4747]: I1202 17:12:06.042920 4747 scope.go:117] "RemoveContainer" containerID="3be40e0899dc07eb1d41f9d34c3bb5ed5165bd3be8a01e08b55b8a19220cd297" Dec 02 17:12:06 crc kubenswrapper[4747]: I1202 17:12:06.078953 4747 scope.go:117] "RemoveContainer" containerID="296eaf44b3c23bd18a39ab14fc9f9e0d7453a34181c3f74f755554daf5d57f70" Dec 02 17:12:06 crc kubenswrapper[4747]: I1202 17:12:06.140732 4747 scope.go:117] "RemoveContainer" containerID="c2b6241b271b4486d7c17b097eeb5279c76e81e3bcc078689890e85063a4a3c7" Dec 02 17:12:12 crc kubenswrapper[4747]: I1202 17:12:12.048786 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-153d-account-create-xtgz6"] Dec 02 17:12:12 crc kubenswrapper[4747]: I1202 17:12:12.061745 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-0c4c-account-create-kfhgj"] Dec 02 17:12:12 crc kubenswrapper[4747]: I1202 17:12:12.070744 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-0c4c-account-create-kfhgj"] Dec 02 17:12:12 crc kubenswrapper[4747]: I1202 17:12:12.081972 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-58f5-account-create-ffwtb"] Dec 02 17:12:12 crc kubenswrapper[4747]: I1202 17:12:12.092403 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-153d-account-create-xtgz6"] Dec 02 17:12:12 crc kubenswrapper[4747]: I1202 17:12:12.101567 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-58f5-account-create-ffwtb"] Dec 02 17:12:13 crc kubenswrapper[4747]: I1202 17:12:13.761498 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:12:13 crc kubenswrapper[4747]: E1202 17:12:13.762964 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:12:13 crc kubenswrapper[4747]: I1202 17:12:13.773545 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ca820aa-feda-455b-9462-da25b82261e5" path="/var/lib/kubelet/pods/3ca820aa-feda-455b-9462-da25b82261e5/volumes" Dec 02 
17:12:13 crc kubenswrapper[4747]: I1202 17:12:13.774130 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4aa0901c-2ff9-478e-b4f2-b76fe0914cae" path="/var/lib/kubelet/pods/4aa0901c-2ff9-478e-b4f2-b76fe0914cae/volumes" Dec 02 17:12:13 crc kubenswrapper[4747]: I1202 17:12:13.774724 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c03aa83-6eec-4627-8fd9-a05bbba2c70c" path="/var/lib/kubelet/pods/7c03aa83-6eec-4627-8fd9-a05bbba2c70c/volumes" Dec 02 17:12:17 crc kubenswrapper[4747]: I1202 17:12:17.630243 4747 generic.go:334] "Generic (PLEG): container finished" podID="0af08a10-c636-49d7-8b9f-ae2bdd2e6371" containerID="7d8a3d48c76d7ecb0074c162c3e226c10d54b2cf672c5393f65c0d2e7d88bff0" exitCode=0 Dec 02 17:12:17 crc kubenswrapper[4747]: I1202 17:12:17.630323 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" event={"ID":"0af08a10-c636-49d7-8b9f-ae2bdd2e6371","Type":"ContainerDied","Data":"7d8a3d48c76d7ecb0074c162c3e226c10d54b2cf672c5393f65c0d2e7d88bff0"} Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.052704 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.165504 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-ssh-key\") pod \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.165565 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-inventory\") pod \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.165652 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbg9p\" (UniqueName: \"kubernetes.io/projected/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-kube-api-access-qbg9p\") pod \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.165840 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-bootstrap-combined-ca-bundle\") pod \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\" (UID: \"0af08a10-c636-49d7-8b9f-ae2bdd2e6371\") " Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.171321 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-kube-api-access-qbg9p" (OuterVolumeSpecName: "kube-api-access-qbg9p") pod "0af08a10-c636-49d7-8b9f-ae2bdd2e6371" (UID: "0af08a10-c636-49d7-8b9f-ae2bdd2e6371"). InnerVolumeSpecName "kube-api-access-qbg9p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.171692 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "0af08a10-c636-49d7-8b9f-ae2bdd2e6371" (UID: "0af08a10-c636-49d7-8b9f-ae2bdd2e6371"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.191272 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-inventory" (OuterVolumeSpecName: "inventory") pod "0af08a10-c636-49d7-8b9f-ae2bdd2e6371" (UID: "0af08a10-c636-49d7-8b9f-ae2bdd2e6371"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.191461 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0af08a10-c636-49d7-8b9f-ae2bdd2e6371" (UID: "0af08a10-c636-49d7-8b9f-ae2bdd2e6371"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.268407 4747 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.268447 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.268459 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.268470 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbg9p\" (UniqueName: \"kubernetes.io/projected/0af08a10-c636-49d7-8b9f-ae2bdd2e6371-kube-api-access-qbg9p\") on node \"crc\" DevicePath \"\"" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.651597 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" event={"ID":"0af08a10-c636-49d7-8b9f-ae2bdd2e6371","Type":"ContainerDied","Data":"b5515d9ad1cba61e25609c5311f9aeca8bf7fa8f83d3820daef6b04e5d883488"} Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.652137 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5515d9ad1cba61e25609c5311f9aeca8bf7fa8f83d3820daef6b04e5d883488" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.651697 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.734530 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb"] Dec 02 17:12:19 crc kubenswrapper[4747]: E1202 17:12:19.735001 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="registry-server" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.735022 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="registry-server" Dec 02 17:12:19 crc kubenswrapper[4747]: E1202 17:12:19.735038 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="extract-content" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.735047 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="extract-content" Dec 02 17:12:19 crc kubenswrapper[4747]: E1202 17:12:19.735080 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="extract-utilities" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.735089 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="extract-utilities" Dec 02 17:12:19 crc kubenswrapper[4747]: E1202 17:12:19.735117 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0af08a10-c636-49d7-8b9f-ae2bdd2e6371" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.735126 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0af08a10-c636-49d7-8b9f-ae2bdd2e6371" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.735356 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f993b646-580f-4a5c-87bd-de6edc305201" containerName="registry-server" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.735390 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0af08a10-c636-49d7-8b9f-ae2bdd2e6371" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.736195 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.825161 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pccj2\" (UniqueName: \"kubernetes.io/projected/df3869df-566d-4296-9b5d-555260ca14dd-kube-api-access-pccj2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.825675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.825991 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.831974 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.832094 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.832190 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.832415 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.838408 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb"] Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.928221 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.928474 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pccj2\" (UniqueName: \"kubernetes.io/projected/df3869df-566d-4296-9b5d-555260ca14dd-kube-api-access-pccj2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.928533 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.934522 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.935420 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:19 crc kubenswrapper[4747]: I1202 17:12:19.944653 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pccj2\" (UniqueName: \"kubernetes.io/projected/df3869df-566d-4296-9b5d-555260ca14dd-kube-api-access-pccj2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-q57jb\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:20 crc kubenswrapper[4747]: I1202 17:12:20.149875 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:12:20 crc kubenswrapper[4747]: I1202 17:12:20.703597 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:12:20 crc kubenswrapper[4747]: I1202 17:12:20.704018 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb"] Dec 02 17:12:21 crc kubenswrapper[4747]: I1202 17:12:21.669701 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" event={"ID":"df3869df-566d-4296-9b5d-555260ca14dd","Type":"ContainerStarted","Data":"7d7a79e74c48ea5e1628c0b82d67e27623827d4d0da472f58628befbefed6ad7"} Dec 02 17:12:26 crc kubenswrapper[4747]: I1202 17:12:26.761008 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:12:26 crc kubenswrapper[4747]: E1202 17:12:26.761682 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:12:33 crc kubenswrapper[4747]: I1202 17:12:33.799189 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" event={"ID":"df3869df-566d-4296-9b5d-555260ca14dd","Type":"ContainerStarted","Data":"97330b9c7cb9ea20bcb1a0d56caefae7dbbf7de231632b2d967ea297ef5108c9"} Dec 02 17:12:33 crc kubenswrapper[4747]: I1202 17:12:33.828458 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" podStartSLOduration=2.680984504 podStartE2EDuration="14.828431069s" podCreationTimestamp="2025-12-02 17:12:19 +0000 UTC" firstStartedPulling="2025-12-02 17:12:20.703300285 +0000 UTC m=+1771.230189034" lastFinishedPulling="2025-12-02 17:12:32.85074685 +0000 UTC m=+1783.377635599" observedRunningTime="2025-12-02 17:12:33.819458593 +0000 UTC m=+1784.346347382" watchObservedRunningTime="2025-12-02 17:12:33.828431069 +0000 UTC m=+1784.355319818" Dec 02 17:12:37 crc kubenswrapper[4747]: I1202 17:12:37.760610 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:12:39 crc kubenswrapper[4747]: E1202 17:12:37.761388 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:12:41 crc kubenswrapper[4747]: I1202 17:12:41.041621 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-hkz2r"] Dec 02 17:12:41 crc kubenswrapper[4747]: I1202 17:12:41.052302 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-hkz2r"] Dec 02 17:12:41 crc kubenswrapper[4747]: I1202 17:12:41.775310 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba0dc134-3984-4156-8198-7880a8616b44" path="/var/lib/kubelet/pods/ba0dc134-3984-4156-8198-7880a8616b44/volumes" Dec 02 17:12:50 crc kubenswrapper[4747]: I1202 17:12:50.054963 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-l8kgp"] Dec 02 17:12:50 crc kubenswrapper[4747]: I1202 17:12:50.072965 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-nbl78"] Dec 02 17:12:50 crc kubenswrapper[4747]: I1202 17:12:50.089623 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-8xlw7"] Dec 02 17:12:50 crc kubenswrapper[4747]: I1202 17:12:50.098297 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-8xlw7"] Dec 02 17:12:50 crc kubenswrapper[4747]: I1202 17:12:50.106171 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-l8kgp"] Dec 02 17:12:50 crc kubenswrapper[4747]: I1202 17:12:50.113551 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-nbl78"] Dec 02 17:12:50 crc kubenswrapper[4747]: I1202 17:12:50.760781 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:12:50 crc kubenswrapper[4747]: E1202 17:12:50.761400 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:12:51 crc kubenswrapper[4747]: I1202 17:12:51.774573 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16809698-14ec-49bf-81cf-a4aab9e6dd62" 
path="/var/lib/kubelet/pods/16809698-14ec-49bf-81cf-a4aab9e6dd62/volumes" Dec 02 17:12:51 crc kubenswrapper[4747]: I1202 17:12:51.776068 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63d1c8ec-685f-46b7-859f-b32d265f3504" path="/var/lib/kubelet/pods/63d1c8ec-685f-46b7-859f-b32d265f3504/volumes" Dec 02 17:12:51 crc kubenswrapper[4747]: I1202 17:12:51.777343 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bf036a6-0cd6-4f08-a963-bb63f45d14a6" path="/var/lib/kubelet/pods/6bf036a6-0cd6-4f08-a963-bb63f45d14a6/volumes" Dec 02 17:13:02 crc kubenswrapper[4747]: I1202 17:13:02.056692 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-b45f-account-create-zsdqj"] Dec 02 17:13:02 crc kubenswrapper[4747]: I1202 17:13:02.076239 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-49fa-account-create-9bzgd"] Dec 02 17:13:02 crc kubenswrapper[4747]: I1202 17:13:02.091730 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-caab-account-create-fzvvr"] Dec 02 17:13:02 crc kubenswrapper[4747]: I1202 17:13:02.103617 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-b45f-account-create-zsdqj"] Dec 02 17:13:02 crc kubenswrapper[4747]: I1202 17:13:02.113972 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-49fa-account-create-9bzgd"] Dec 02 17:13:02 crc kubenswrapper[4747]: I1202 17:13:02.122774 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-caab-account-create-fzvvr"] Dec 02 17:13:03 crc kubenswrapper[4747]: I1202 17:13:03.772827 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34c0b183-b0c0-4ce9-b869-1e9c0f5406a7" path="/var/lib/kubelet/pods/34c0b183-b0c0-4ce9-b869-1e9c0f5406a7/volumes" Dec 02 17:13:03 crc kubenswrapper[4747]: I1202 17:13:03.773669 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b29989e1-d454-4535-b5b2-cae16c355c4a" path="/var/lib/kubelet/pods/b29989e1-d454-4535-b5b2-cae16c355c4a/volumes" Dec 02 17:13:03 crc kubenswrapper[4747]: I1202 17:13:03.774507 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd662fa1-bbd1-4df5-8209-a062930ce5a5" path="/var/lib/kubelet/pods/bd662fa1-bbd1-4df5-8209-a062930ce5a5/volumes" Dec 02 17:13:04 crc kubenswrapper[4747]: I1202 17:13:04.057218 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-fpmlm"] Dec 02 17:13:04 crc kubenswrapper[4747]: I1202 17:13:04.066725 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-fpmlm"] Dec 02 17:13:05 crc kubenswrapper[4747]: I1202 17:13:05.760405 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:13:05 crc kubenswrapper[4747]: E1202 17:13:05.760965 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:13:05 crc kubenswrapper[4747]: I1202 17:13:05.775277 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58d94d7e-5759-47cf-9920-db9d5fa862b3" 
path="/var/lib/kubelet/pods/58d94d7e-5759-47cf-9920-db9d5fa862b3/volumes" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.261785 4747 scope.go:117] "RemoveContainer" containerID="e3473243b1b146a48828280d697b5c3ee79c1b98feaa0f4a2fd88985ce7816c5" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.317725 4747 scope.go:117] "RemoveContainer" containerID="6c636c3ef4af986fda8151e8b6b39bf06fbd0f526e5e7e0262a3309590629e14" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.340468 4747 scope.go:117] "RemoveContainer" containerID="bf5b9b8a0d33ba8b010d22ef8019aba459d24d05edd4531489345c7d01bdeb5e" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.383886 4747 scope.go:117] "RemoveContainer" containerID="f3cc78ea769ccab77ba67659ff16524b4a7d422ce28605ca4bae0f6114becec2" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.456714 4747 scope.go:117] "RemoveContainer" containerID="8e078537ea2cc73e2b2a00eaac9899af0dd22b96c4c792ac720a4ececd968c78" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.486643 4747 scope.go:117] "RemoveContainer" containerID="fe8ff0e6516ddf121c20eb6a803fdb239a61d9e9a4c55527a7139665124ff206" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.528052 4747 scope.go:117] "RemoveContainer" containerID="080e1fe456b3764e74df995bcbc9f90df6e09d02c8d5c28a3c81192046d34935" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.551009 4747 scope.go:117] "RemoveContainer" containerID="d31bfbc4f2a3336dc57eeb4a557e764dd8f16fd2cc74aab31ffbff6d17c9e25d" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.572423 4747 scope.go:117] "RemoveContainer" containerID="738814264b3583f0b3849cd96793b640f5ef7333533695943bf6c4a63fdac69a" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.592385 4747 scope.go:117] "RemoveContainer" containerID="e5becf69fbc53012b191926c11d9f5eb03767088d56b00a888e3eb02f5cec38a" Dec 02 17:13:06 crc kubenswrapper[4747]: I1202 17:13:06.613247 4747 scope.go:117] "RemoveContainer" containerID="6558a067452c1230d859787756352a8f9cfc89ce456eed74d70ce9f6c9fa8025" Dec 02 17:13:16 crc kubenswrapper[4747]: I1202 17:13:16.760959 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:13:16 crc kubenswrapper[4747]: E1202 17:13:16.762154 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:13:29 crc kubenswrapper[4747]: I1202 17:13:29.770352 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:13:29 crc kubenswrapper[4747]: E1202 17:13:29.771079 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:13:43 crc kubenswrapper[4747]: I1202 17:13:43.772789 4747 scope.go:117] "RemoveContainer" 
containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:13:43 crc kubenswrapper[4747]: E1202 17:13:43.778726 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:13:44 crc kubenswrapper[4747]: I1202 17:13:44.080178 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-q5btv"] Dec 02 17:13:44 crc kubenswrapper[4747]: I1202 17:13:44.091792 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-q5btv"] Dec 02 17:13:45 crc kubenswrapper[4747]: I1202 17:13:45.037333 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-sclpf"] Dec 02 17:13:45 crc kubenswrapper[4747]: I1202 17:13:45.052404 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-sclpf"] Dec 02 17:13:45 crc kubenswrapper[4747]: I1202 17:13:45.778288 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8" path="/var/lib/kubelet/pods/b287bcaf-56ce-4ca5-8e41-9a733bf9eeb8/volumes" Dec 02 17:13:45 crc kubenswrapper[4747]: I1202 17:13:45.779376 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d05e3f55-d7e5-417a-9344-e24038aaa516" path="/var/lib/kubelet/pods/d05e3f55-d7e5-417a-9344-e24038aaa516/volumes" Dec 02 17:13:54 crc kubenswrapper[4747]: I1202 17:13:54.043827 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-9gbx8"] Dec 02 17:13:54 crc kubenswrapper[4747]: I1202 17:13:54.058660 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-9gbx8"] Dec 02 17:13:54 crc kubenswrapper[4747]: I1202 17:13:54.760934 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:13:54 crc kubenswrapper[4747]: E1202 17:13:54.761460 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:13:55 crc kubenswrapper[4747]: I1202 17:13:55.778110 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61d3d063-44d5-43ee-9b9e-a4560c33f775" path="/var/lib/kubelet/pods/61d3d063-44d5-43ee-9b9e-a4560c33f775/volumes" Dec 02 17:14:04 crc kubenswrapper[4747]: I1202 17:14:04.048990 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-bgbtw"] Dec 02 17:14:04 crc kubenswrapper[4747]: I1202 17:14:04.066723 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-6f4g4"] Dec 02 17:14:04 crc kubenswrapper[4747]: I1202 17:14:04.078393 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-bgbtw"] Dec 02 17:14:04 crc kubenswrapper[4747]: I1202 17:14:04.087017 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/barbican-db-sync-6f4g4"] Dec 02 17:14:05 crc kubenswrapper[4747]: I1202 17:14:05.774650 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2" path="/var/lib/kubelet/pods/3b1e1c01-1650-43fb-9ff9-47a5b1c5c3e2/volumes" Dec 02 17:14:05 crc kubenswrapper[4747]: I1202 17:14:05.777085 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3d0edb2-cca7-4f61-bf72-c3fddb909fea" path="/var/lib/kubelet/pods/b3d0edb2-cca7-4f61-bf72-c3fddb909fea/volumes" Dec 02 17:14:06 crc kubenswrapper[4747]: I1202 17:14:06.869811 4747 scope.go:117] "RemoveContainer" containerID="671a7b7e02a8969d6c8778766b1de862a5c16326e729be03b1c2ff650e8766fe" Dec 02 17:14:06 crc kubenswrapper[4747]: I1202 17:14:06.920448 4747 scope.go:117] "RemoveContainer" containerID="9c22ee406fa8516908ec60f0cc4a3a8dacc8dcb202d0f8112b59ee1aa68bf9e0" Dec 02 17:14:06 crc kubenswrapper[4747]: I1202 17:14:06.963946 4747 scope.go:117] "RemoveContainer" containerID="63e459cef3c1eb70f665dfb15828b42966bc3b519f61f757c46be948d23a88e0" Dec 02 17:14:07 crc kubenswrapper[4747]: I1202 17:14:07.000104 4747 scope.go:117] "RemoveContainer" containerID="fc2bc18dd44a49460d0c436c80a85d551e46008ce9a3b3e10dd6430713051a0b" Dec 02 17:14:07 crc kubenswrapper[4747]: I1202 17:14:07.075675 4747 scope.go:117] "RemoveContainer" containerID="3e8ff609f25fdea982bb3091ca88896f7e8d46afd4c12cc4421b3ab74c34c3cc" Dec 02 17:14:07 crc kubenswrapper[4747]: I1202 17:14:07.738819 4747 generic.go:334] "Generic (PLEG): container finished" podID="df3869df-566d-4296-9b5d-555260ca14dd" containerID="97330b9c7cb9ea20bcb1a0d56caefae7dbbf7de231632b2d967ea297ef5108c9" exitCode=0 Dec 02 17:14:07 crc kubenswrapper[4747]: I1202 17:14:07.738870 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" event={"ID":"df3869df-566d-4296-9b5d-555260ca14dd","Type":"ContainerDied","Data":"97330b9c7cb9ea20bcb1a0d56caefae7dbbf7de231632b2d967ea297ef5108c9"} Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.214191 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.332614 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-inventory\") pod \"df3869df-566d-4296-9b5d-555260ca14dd\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.332760 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-ssh-key\") pod \"df3869df-566d-4296-9b5d-555260ca14dd\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.332899 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pccj2\" (UniqueName: \"kubernetes.io/projected/df3869df-566d-4296-9b5d-555260ca14dd-kube-api-access-pccj2\") pod \"df3869df-566d-4296-9b5d-555260ca14dd\" (UID: \"df3869df-566d-4296-9b5d-555260ca14dd\") " Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.346187 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df3869df-566d-4296-9b5d-555260ca14dd-kube-api-access-pccj2" (OuterVolumeSpecName: "kube-api-access-pccj2") pod "df3869df-566d-4296-9b5d-555260ca14dd" (UID: "df3869df-566d-4296-9b5d-555260ca14dd"). InnerVolumeSpecName "kube-api-access-pccj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.367486 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "df3869df-566d-4296-9b5d-555260ca14dd" (UID: "df3869df-566d-4296-9b5d-555260ca14dd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.368232 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-inventory" (OuterVolumeSpecName: "inventory") pod "df3869df-566d-4296-9b5d-555260ca14dd" (UID: "df3869df-566d-4296-9b5d-555260ca14dd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.435239 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.435267 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/df3869df-566d-4296-9b5d-555260ca14dd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.435278 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pccj2\" (UniqueName: \"kubernetes.io/projected/df3869df-566d-4296-9b5d-555260ca14dd-kube-api-access-pccj2\") on node \"crc\" DevicePath \"\"" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.770585 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.771512 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:14:09 crc kubenswrapper[4747]: E1202 17:14:09.772140 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.774508 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-q57jb" event={"ID":"df3869df-566d-4296-9b5d-555260ca14dd","Type":"ContainerDied","Data":"7d7a79e74c48ea5e1628c0b82d67e27623827d4d0da472f58628befbefed6ad7"} Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.774564 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d7a79e74c48ea5e1628c0b82d67e27623827d4d0da472f58628befbefed6ad7" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.859459 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf"] Dec 02 17:14:09 crc kubenswrapper[4747]: E1202 17:14:09.860008 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df3869df-566d-4296-9b5d-555260ca14dd" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.860034 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="df3869df-566d-4296-9b5d-555260ca14dd" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.860340 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="df3869df-566d-4296-9b5d-555260ca14dd" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.861167 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.864241 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.864362 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.864241 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.865588 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:14:09 crc kubenswrapper[4747]: I1202 17:14:09.873362 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf"] Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.047561 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfv4z\" (UniqueName: \"kubernetes.io/projected/cec55b8b-af9a-473f-b92b-e3008a596073-kube-api-access-mfv4z\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.047669 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.047794 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.149756 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.150077 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.150234 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfv4z\" (UniqueName: \"kubernetes.io/projected/cec55b8b-af9a-473f-b92b-e3008a596073-kube-api-access-mfv4z\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.157386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.157764 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.181675 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfv4z\" (UniqueName: \"kubernetes.io/projected/cec55b8b-af9a-473f-b92b-e3008a596073-kube-api-access-mfv4z\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:10 crc kubenswrapper[4747]: I1202 17:14:10.481134 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" Dec 02 17:14:11 crc kubenswrapper[4747]: I1202 17:14:11.060865 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf"] Dec 02 17:14:11 crc kubenswrapper[4747]: I1202 17:14:11.786435 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" event={"ID":"cec55b8b-af9a-473f-b92b-e3008a596073","Type":"ContainerStarted","Data":"29359f45ed6f0ec99bcc0ef3f26390d7ab3ad3d00a6611349bb57626ee43b911"} Dec 02 17:14:11 crc kubenswrapper[4747]: I1202 17:14:11.786755 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" event={"ID":"cec55b8b-af9a-473f-b92b-e3008a596073","Type":"ContainerStarted","Data":"bc3f57dcb31c92c822e4fd9defb4eeea7d0372b34e4887b6f5d34ca9dd6028a8"} Dec 02 17:14:11 crc kubenswrapper[4747]: I1202 17:14:11.806478 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" podStartSLOduration=2.554556457 podStartE2EDuration="2.806459321s" podCreationTimestamp="2025-12-02 17:14:09 +0000 UTC" firstStartedPulling="2025-12-02 17:14:11.05848097 +0000 UTC m=+1881.585369719" lastFinishedPulling="2025-12-02 17:14:11.310383834 +0000 UTC m=+1881.837272583" observedRunningTime="2025-12-02 17:14:11.800391158 +0000 UTC m=+1882.327279907" watchObservedRunningTime="2025-12-02 17:14:11.806459321 +0000 UTC m=+1882.333348090" Dec 02 17:14:23 crc kubenswrapper[4747]: I1202 17:14:23.760888 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:14:23 crc kubenswrapper[4747]: E1202 17:14:23.761780 4747 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:14:35 crc kubenswrapper[4747]: I1202 17:14:35.764643 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:14:35 crc kubenswrapper[4747]: E1202 17:14:35.766437 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:14:48 crc kubenswrapper[4747]: I1202 17:14:48.053818 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-fq9j2"] Dec 02 17:14:48 crc kubenswrapper[4747]: I1202 17:14:48.067056 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-wjll8"] Dec 02 17:14:48 crc kubenswrapper[4747]: I1202 17:14:48.076201 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-t4cdr"] Dec 02 17:14:48 crc kubenswrapper[4747]: I1202 17:14:48.083293 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-wjll8"] Dec 02 17:14:48 crc kubenswrapper[4747]: I1202 17:14:48.089806 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-t4cdr"] Dec 02 17:14:48 crc kubenswrapper[4747]: I1202 17:14:48.096657 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-fq9j2"] Dec 02 17:14:49 crc kubenswrapper[4747]: I1202 17:14:49.770845 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a171e5ef-7316-4dc0-a40c-9bac98d8f28a" path="/var/lib/kubelet/pods/a171e5ef-7316-4dc0-a40c-9bac98d8f28a/volumes" Dec 02 17:14:49 crc kubenswrapper[4747]: I1202 17:14:49.771834 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baa3122f-58e4-4adb-b773-1207e27ed51f" path="/var/lib/kubelet/pods/baa3122f-58e4-4adb-b773-1207e27ed51f/volumes" Dec 02 17:14:49 crc kubenswrapper[4747]: I1202 17:14:49.772349 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eff0029d-2d34-42be-b5f0-fd49f3f6b3ac" path="/var/lib/kubelet/pods/eff0029d-2d34-42be-b5f0-fd49f3f6b3ac/volumes" Dec 02 17:14:50 crc kubenswrapper[4747]: I1202 17:14:50.760442 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:14:50 crc kubenswrapper[4747]: E1202 17:14:50.761110 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:14:58 crc kubenswrapper[4747]: I1202 17:14:58.042861 4747 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-cell1-8d88-account-create-4b86z"] Dec 02 17:14:58 crc kubenswrapper[4747]: I1202 17:14:58.055714 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-8d88-account-create-4b86z"] Dec 02 17:14:59 crc kubenswrapper[4747]: I1202 17:14:59.770187 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="706d747c-f59b-46d1-aa90-d759e3a0f170" path="/var/lib/kubelet/pods/706d747c-f59b-46d1-aa90-d759e3a0f170/volumes" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.144819 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c"] Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.146022 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.148376 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.149559 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.159400 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c"] Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.181499 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff8708f7-868b-4b10-bef7-db00715aeaad-config-volume\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.181636 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8th7b\" (UniqueName: \"kubernetes.io/projected/ff8708f7-868b-4b10-bef7-db00715aeaad-kube-api-access-8th7b\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.181708 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff8708f7-868b-4b10-bef7-db00715aeaad-secret-volume\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.284747 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff8708f7-868b-4b10-bef7-db00715aeaad-secret-volume\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.285038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff8708f7-868b-4b10-bef7-db00715aeaad-config-volume\") pod \"collect-profiles-29411595-mfh5c\" (UID: 
\"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.285234 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8th7b\" (UniqueName: \"kubernetes.io/projected/ff8708f7-868b-4b10-bef7-db00715aeaad-kube-api-access-8th7b\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.286140 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff8708f7-868b-4b10-bef7-db00715aeaad-config-volume\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.291701 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff8708f7-868b-4b10-bef7-db00715aeaad-secret-volume\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.302464 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8th7b\" (UniqueName: \"kubernetes.io/projected/ff8708f7-868b-4b10-bef7-db00715aeaad-kube-api-access-8th7b\") pod \"collect-profiles-29411595-mfh5c\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.474590 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:00 crc kubenswrapper[4747]: I1202 17:15:00.904010 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c"] Dec 02 17:15:01 crc kubenswrapper[4747]: I1202 17:15:01.254952 4747 generic.go:334] "Generic (PLEG): container finished" podID="ff8708f7-868b-4b10-bef7-db00715aeaad" containerID="74096b5680e10adf153ca6b4e8afce29a479a5d7f19f832d10383a58d8b8fe1d" exitCode=0 Dec 02 17:15:01 crc kubenswrapper[4747]: I1202 17:15:01.255032 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" event={"ID":"ff8708f7-868b-4b10-bef7-db00715aeaad","Type":"ContainerDied","Data":"74096b5680e10adf153ca6b4e8afce29a479a5d7f19f832d10383a58d8b8fe1d"} Dec 02 17:15:01 crc kubenswrapper[4747]: I1202 17:15:01.255113 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" event={"ID":"ff8708f7-868b-4b10-bef7-db00715aeaad","Type":"ContainerStarted","Data":"8660f8bbe10d82de30088a48396261825e7329c1ca7902e0e58ad2cb1f1e1227"} Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.625923 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.727120 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff8708f7-868b-4b10-bef7-db00715aeaad-config-volume\") pod \"ff8708f7-868b-4b10-bef7-db00715aeaad\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.727230 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8th7b\" (UniqueName: \"kubernetes.io/projected/ff8708f7-868b-4b10-bef7-db00715aeaad-kube-api-access-8th7b\") pod \"ff8708f7-868b-4b10-bef7-db00715aeaad\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.727334 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff8708f7-868b-4b10-bef7-db00715aeaad-secret-volume\") pod \"ff8708f7-868b-4b10-bef7-db00715aeaad\" (UID: \"ff8708f7-868b-4b10-bef7-db00715aeaad\") " Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.727871 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff8708f7-868b-4b10-bef7-db00715aeaad-config-volume" (OuterVolumeSpecName: "config-volume") pod "ff8708f7-868b-4b10-bef7-db00715aeaad" (UID: "ff8708f7-868b-4b10-bef7-db00715aeaad"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.732683 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff8708f7-868b-4b10-bef7-db00715aeaad-kube-api-access-8th7b" (OuterVolumeSpecName: "kube-api-access-8th7b") pod "ff8708f7-868b-4b10-bef7-db00715aeaad" (UID: "ff8708f7-868b-4b10-bef7-db00715aeaad"). InnerVolumeSpecName "kube-api-access-8th7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.732761 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff8708f7-868b-4b10-bef7-db00715aeaad-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ff8708f7-868b-4b10-bef7-db00715aeaad" (UID: "ff8708f7-868b-4b10-bef7-db00715aeaad"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.830413 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8th7b\" (UniqueName: \"kubernetes.io/projected/ff8708f7-868b-4b10-bef7-db00715aeaad-kube-api-access-8th7b\") on node \"crc\" DevicePath \"\"" Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.830484 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff8708f7-868b-4b10-bef7-db00715aeaad-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 17:15:02 crc kubenswrapper[4747]: I1202 17:15:02.830498 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff8708f7-868b-4b10-bef7-db00715aeaad-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 17:15:03 crc kubenswrapper[4747]: I1202 17:15:03.275311 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" event={"ID":"ff8708f7-868b-4b10-bef7-db00715aeaad","Type":"ContainerDied","Data":"8660f8bbe10d82de30088a48396261825e7329c1ca7902e0e58ad2cb1f1e1227"} Dec 02 17:15:03 crc kubenswrapper[4747]: I1202 17:15:03.275362 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8660f8bbe10d82de30088a48396261825e7329c1ca7902e0e58ad2cb1f1e1227" Dec 02 17:15:03 crc kubenswrapper[4747]: I1202 17:15:03.275395 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c" Dec 02 17:15:03 crc kubenswrapper[4747]: I1202 17:15:03.690533 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7"] Dec 02 17:15:03 crc kubenswrapper[4747]: I1202 17:15:03.698624 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411550-2s6h7"] Dec 02 17:15:03 crc kubenswrapper[4747]: I1202 17:15:03.772266 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2530933f-b67d-4f7e-93c2-3edf83af285b" path="/var/lib/kubelet/pods/2530933f-b67d-4f7e-93c2-3edf83af285b/volumes" Dec 02 17:15:05 crc kubenswrapper[4747]: I1202 17:15:05.760720 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:15:05 crc kubenswrapper[4747]: E1202 17:15:05.761304 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:15:07 crc kubenswrapper[4747]: I1202 17:15:07.213655 4747 scope.go:117] "RemoveContainer" containerID="efa384a3d44fea1d05f430229d4947fcb4ff59fa8b4abd0ef6bb470a6324743d" Dec 02 17:15:07 crc kubenswrapper[4747]: I1202 17:15:07.234841 4747 scope.go:117] "RemoveContainer" containerID="ef4da0d72c8611ea8a36925d6edd4b9b22a38bbed46f7f736dd2a1c21950d36a" Dec 02 17:15:07 crc kubenswrapper[4747]: I1202 17:15:07.284325 4747 scope.go:117] "RemoveContainer" containerID="92cc0ab81906b40f358235d2e16f9247f1bf8e58d81c649e655fb9cb6d5072fa" Dec 02 17:15:07 crc kubenswrapper[4747]: I1202 17:15:07.338212 
Dec 02 17:15:07 crc kubenswrapper[4747]: I1202 17:15:07.409212 4747 scope.go:117] "RemoveContainer" containerID="b0d760da09ed4cc286a9e91941b45b8d460c2339b89e2b3b7a8a3583545a1567"
Dec 02 17:15:08 crc kubenswrapper[4747]: I1202 17:15:08.044967 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-baeb-account-create-vmvkj"]
Dec 02 17:15:08 crc kubenswrapper[4747]: I1202 17:15:08.053657 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-6e75-account-create-rs289"]
Dec 02 17:15:08 crc kubenswrapper[4747]: I1202 17:15:08.062784 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-baeb-account-create-vmvkj"]
Dec 02 17:15:08 crc kubenswrapper[4747]: I1202 17:15:08.072483 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-6e75-account-create-rs289"]
Dec 02 17:15:09 crc kubenswrapper[4747]: I1202 17:15:09.771213 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e07ad04-23dd-4c42-bce3-0131ec6ad8f7" path="/var/lib/kubelet/pods/5e07ad04-23dd-4c42-bce3-0131ec6ad8f7/volumes"
Dec 02 17:15:09 crc kubenswrapper[4747]: I1202 17:15:09.772046 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae346fbe-0ec8-4b40-9264-afca3b7110ba" path="/var/lib/kubelet/pods/ae346fbe-0ec8-4b40-9264-afca3b7110ba/volumes"
Dec 02 17:15:16 crc kubenswrapper[4747]: I1202 17:15:16.761349 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"
Dec 02 17:15:16 crc kubenswrapper[4747]: E1202 17:15:16.762116 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.167624 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9kcch"]
Dec 02 17:15:18 crc kubenswrapper[4747]: E1202 17:15:18.168585 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff8708f7-868b-4b10-bef7-db00715aeaad" containerName="collect-profiles"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.168609 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff8708f7-868b-4b10-bef7-db00715aeaad" containerName="collect-profiles"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.168880 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff8708f7-868b-4b10-bef7-db00715aeaad" containerName="collect-profiles"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.170791 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.178523 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9kcch"]
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.321659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-catalog-content\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.322012 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wgzj\" (UniqueName: \"kubernetes.io/projected/959719b4-21e9-4f58-8b18-f69c5126b232-kube-api-access-7wgzj\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.322139 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-utilities\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.419994 4747 generic.go:334] "Generic (PLEG): container finished" podID="cec55b8b-af9a-473f-b92b-e3008a596073" containerID="29359f45ed6f0ec99bcc0ef3f26390d7ab3ad3d00a6611349bb57626ee43b911" exitCode=0
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.420037 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" event={"ID":"cec55b8b-af9a-473f-b92b-e3008a596073","Type":"ContainerDied","Data":"29359f45ed6f0ec99bcc0ef3f26390d7ab3ad3d00a6611349bb57626ee43b911"}
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.424422 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-catalog-content\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.424475 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wgzj\" (UniqueName: \"kubernetes.io/projected/959719b4-21e9-4f58-8b18-f69c5126b232-kube-api-access-7wgzj\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.424537 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-utilities\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.425167 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-utilities\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.425170 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-catalog-content\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.445850 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wgzj\" (UniqueName: \"kubernetes.io/projected/959719b4-21e9-4f58-8b18-f69c5126b232-kube-api-access-7wgzj\") pod \"redhat-operators-9kcch\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") " pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:18 crc kubenswrapper[4747]: I1202 17:15:18.498728 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:19 crc kubenswrapper[4747]: I1202 17:15:19.029031 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9kcch"]
Dec 02 17:15:19 crc kubenswrapper[4747]: I1202 17:15:19.429320 4747 generic.go:334] "Generic (PLEG): container finished" podID="959719b4-21e9-4f58-8b18-f69c5126b232" containerID="0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb" exitCode=0
Dec 02 17:15:19 crc kubenswrapper[4747]: I1202 17:15:19.429380 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9kcch" event={"ID":"959719b4-21e9-4f58-8b18-f69c5126b232","Type":"ContainerDied","Data":"0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb"}
Dec 02 17:15:19 crc kubenswrapper[4747]: I1202 17:15:19.429608 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9kcch" event={"ID":"959719b4-21e9-4f58-8b18-f69c5126b232","Type":"ContainerStarted","Data":"7ecd6cf4d2598176356f7909a21dba8251d780ee3db731f54c5134ed698b62b1"}
Dec 02 17:15:19 crc kubenswrapper[4747]: I1202 17:15:19.971810 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.112542 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-ssh-key\") pod \"cec55b8b-af9a-473f-b92b-e3008a596073\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") "
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.112677 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-inventory\") pod \"cec55b8b-af9a-473f-b92b-e3008a596073\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") "
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.112782 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfv4z\" (UniqueName: \"kubernetes.io/projected/cec55b8b-af9a-473f-b92b-e3008a596073-kube-api-access-mfv4z\") pod \"cec55b8b-af9a-473f-b92b-e3008a596073\" (UID: \"cec55b8b-af9a-473f-b92b-e3008a596073\") "
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.129736 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cec55b8b-af9a-473f-b92b-e3008a596073-kube-api-access-mfv4z" (OuterVolumeSpecName: "kube-api-access-mfv4z") pod "cec55b8b-af9a-473f-b92b-e3008a596073" (UID: "cec55b8b-af9a-473f-b92b-e3008a596073"). InnerVolumeSpecName "kube-api-access-mfv4z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.139414 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cec55b8b-af9a-473f-b92b-e3008a596073" (UID: "cec55b8b-af9a-473f-b92b-e3008a596073"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.141042 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-inventory" (OuterVolumeSpecName: "inventory") pod "cec55b8b-af9a-473f-b92b-e3008a596073" (UID: "cec55b8b-af9a-473f-b92b-e3008a596073"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.214961 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.214996 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cec55b8b-af9a-473f-b92b-e3008a596073-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.215007 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfv4z\" (UniqueName: \"kubernetes.io/projected/cec55b8b-af9a-473f-b92b-e3008a596073-kube-api-access-mfv4z\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.438447 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf" event={"ID":"cec55b8b-af9a-473f-b92b-e3008a596073","Type":"ContainerDied","Data":"bc3f57dcb31c92c822e4fd9defb4eeea7d0372b34e4887b6f5d34ca9dd6028a8"}
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.438687 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc3f57dcb31c92c822e4fd9defb4eeea7d0372b34e4887b6f5d34ca9dd6028a8"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.438703 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.539989 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"]
Dec 02 17:15:20 crc kubenswrapper[4747]: E1202 17:15:20.540528 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cec55b8b-af9a-473f-b92b-e3008a596073" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.540548 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cec55b8b-af9a-473f-b92b-e3008a596073" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.540738 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cec55b8b-af9a-473f-b92b-e3008a596073" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.541399 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.547812 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"]
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.548947 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.548988 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.549104 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.549290 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.724764 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.724862 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmsst\" (UniqueName: \"kubernetes.io/projected/ca7ca1bc-1500-4246-b79c-6566ec5f0281-kube-api-access-qmsst\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.725202 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.827057 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.827223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.827258 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmsst\" (UniqueName: \"kubernetes.io/projected/ca7ca1bc-1500-4246-b79c-6566ec5f0281-kube-api-access-qmsst\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.831733 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.832082 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.857452 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmsst\" (UniqueName: \"kubernetes.io/projected/ca7ca1bc-1500-4246-b79c-6566ec5f0281-kube-api-access-qmsst\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-l7fct\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:20 crc kubenswrapper[4747]: I1202 17:15:20.871585 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:21 crc kubenswrapper[4747]: I1202 17:15:21.466256 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9kcch" event={"ID":"959719b4-21e9-4f58-8b18-f69c5126b232","Type":"ContainerStarted","Data":"b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1"}
Dec 02 17:15:21 crc kubenswrapper[4747]: I1202 17:15:21.515009 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"]
Dec 02 17:15:21 crc kubenswrapper[4747]: W1202 17:15:21.519973 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca7ca1bc_1500_4246_b79c_6566ec5f0281.slice/crio-098b2f0f623af7c67283700109b04c8a50f8c4fe54198983ab1090e86ddc5bb7 WatchSource:0}: Error finding container 098b2f0f623af7c67283700109b04c8a50f8c4fe54198983ab1090e86ddc5bb7: Status 404 returned error can't find the container with id 098b2f0f623af7c67283700109b04c8a50f8c4fe54198983ab1090e86ddc5bb7
Dec 02 17:15:22 crc kubenswrapper[4747]: I1202 17:15:22.477724 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct" event={"ID":"ca7ca1bc-1500-4246-b79c-6566ec5f0281","Type":"ContainerStarted","Data":"ee12d856241cab264d63f9dd057097442b935a8b9860212cf3f64a846053c515"}
Dec 02 17:15:22 crc kubenswrapper[4747]: I1202 17:15:22.478132 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct" event={"ID":"ca7ca1bc-1500-4246-b79c-6566ec5f0281","Type":"ContainerStarted","Data":"098b2f0f623af7c67283700109b04c8a50f8c4fe54198983ab1090e86ddc5bb7"}
Dec 02 17:15:22 crc kubenswrapper[4747]: I1202 17:15:22.501372 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct" podStartSLOduration=2.097556073 podStartE2EDuration="2.501351433s" podCreationTimestamp="2025-12-02 17:15:20 +0000 UTC" firstStartedPulling="2025-12-02 17:15:21.522732643 +0000 UTC m=+1952.049621392" lastFinishedPulling="2025-12-02 17:15:21.926528003 +0000 UTC m=+1952.453416752" observedRunningTime="2025-12-02 17:15:22.49424672 +0000 UTC m=+1953.021135469" watchObservedRunningTime="2025-12-02 17:15:22.501351433 +0000 UTC m=+1953.028240182"
Dec 02 17:15:23 crc kubenswrapper[4747]: I1202 17:15:23.618624 4747 generic.go:334] "Generic (PLEG): container finished" podID="959719b4-21e9-4f58-8b18-f69c5126b232" containerID="b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1" exitCode=0
Dec 02 17:15:23 crc kubenswrapper[4747]: I1202 17:15:23.618771 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9kcch" event={"ID":"959719b4-21e9-4f58-8b18-f69c5126b232","Type":"ContainerDied","Data":"b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1"}
Dec 02 17:15:24 crc kubenswrapper[4747]: I1202 17:15:24.630298 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9kcch" event={"ID":"959719b4-21e9-4f58-8b18-f69c5126b232","Type":"ContainerStarted","Data":"300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae"}
Dec 02 17:15:24 crc kubenswrapper[4747]: I1202 17:15:24.670680 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9kcch" podStartSLOduration=2.0025102869999998 podStartE2EDuration="6.67065583s" podCreationTimestamp="2025-12-02 17:15:18 +0000 UTC" firstStartedPulling="2025-12-02 17:15:19.43080631 +0000 UTC m=+1949.957695059" lastFinishedPulling="2025-12-02 17:15:24.098951853 +0000 UTC m=+1954.625840602" observedRunningTime="2025-12-02 17:15:24.664257318 +0000 UTC m=+1955.191146067" watchObservedRunningTime="2025-12-02 17:15:24.67065583 +0000 UTC m=+1955.197544579"
Dec 02 17:15:27 crc kubenswrapper[4747]: I1202 17:15:27.659305 4747 generic.go:334] "Generic (PLEG): container finished" podID="ca7ca1bc-1500-4246-b79c-6566ec5f0281" containerID="ee12d856241cab264d63f9dd057097442b935a8b9860212cf3f64a846053c515" exitCode=0
Dec 02 17:15:27 crc kubenswrapper[4747]: I1202 17:15:27.659442 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct" event={"ID":"ca7ca1bc-1500-4246-b79c-6566ec5f0281","Type":"ContainerDied","Data":"ee12d856241cab264d63f9dd057097442b935a8b9860212cf3f64a846053c515"}
Dec 02 17:15:28 crc kubenswrapper[4747]: I1202 17:15:28.499867 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:28 crc kubenswrapper[4747]: I1202 17:15:28.499938 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.112302 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
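[Editorial worked example, not part of the captured log.] The two pod_startup_latency_tracker records above are internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that E2E duration minus the image-pull window (lastFinishedPulling minus firstStartedPulling). The formula is inferred from these records, not quoted from kubelet documentation; the seconds-within-the-minute values below are copied from the log:

# validate-network-...-l7fct: created at 17:15:20, observed running 17:15:22.501351433
e2e = 22.501351433 - 20.0            # ~2.501351433, matches podStartE2EDuration="2.501351433s"
pull = 21.926528003 - 21.522732643   # lastFinishedPulling - firstStartedPulling
print(e2e - pull)                    # ~2.097556073, matches podStartSLOduration

# redhat-operators-9kcch: created at 17:15:18, observed running 17:15:24.67065583
e2e = 24.67065583 - 18.0             # ~6.67065583
pull = 24.098951853 - 19.43080631
print(e2e - pull)                    # ~2.002510287 (the log prints 2.0025102869999998, a float artifact)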
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.238699 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-ssh-key\") pod \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") "
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.238848 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-inventory\") pod \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") "
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.239029 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmsst\" (UniqueName: \"kubernetes.io/projected/ca7ca1bc-1500-4246-b79c-6566ec5f0281-kube-api-access-qmsst\") pod \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\" (UID: \"ca7ca1bc-1500-4246-b79c-6566ec5f0281\") "
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.255583 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca7ca1bc-1500-4246-b79c-6566ec5f0281-kube-api-access-qmsst" (OuterVolumeSpecName: "kube-api-access-qmsst") pod "ca7ca1bc-1500-4246-b79c-6566ec5f0281" (UID: "ca7ca1bc-1500-4246-b79c-6566ec5f0281"). InnerVolumeSpecName "kube-api-access-qmsst". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.273275 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-inventory" (OuterVolumeSpecName: "inventory") pod "ca7ca1bc-1500-4246-b79c-6566ec5f0281" (UID: "ca7ca1bc-1500-4246-b79c-6566ec5f0281"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.286019 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ca7ca1bc-1500-4246-b79c-6566ec5f0281" (UID: "ca7ca1bc-1500-4246-b79c-6566ec5f0281"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.341188 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.341224 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca7ca1bc-1500-4246-b79c-6566ec5f0281-inventory\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.341234 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmsst\" (UniqueName: \"kubernetes.io/projected/ca7ca1bc-1500-4246-b79c-6566ec5f0281-kube-api-access-qmsst\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.547967 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9kcch" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="registry-server" probeResult="failure" output=<
Dec 02 17:15:29 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s
Dec 02 17:15:29 crc kubenswrapper[4747]: >
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.678010 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct" event={"ID":"ca7ca1bc-1500-4246-b79c-6566ec5f0281","Type":"ContainerDied","Data":"098b2f0f623af7c67283700109b04c8a50f8c4fe54198983ab1090e86ddc5bb7"}
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.678310 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="098b2f0f623af7c67283700109b04c8a50f8c4fe54198983ab1090e86ddc5bb7"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.678120 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-l7fct"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.781248 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"]
Dec 02 17:15:29 crc kubenswrapper[4747]: E1202 17:15:29.781673 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca7ca1bc-1500-4246-b79c-6566ec5f0281" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.781693 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca7ca1bc-1500-4246-b79c-6566ec5f0281" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.781860 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca7ca1bc-1500-4246-b79c-6566ec5f0281" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.782491 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.784416 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.784492 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.785406 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.786340 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.803583 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"]
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.850468 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.851085 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.851163 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwmnn\" (UniqueName: \"kubernetes.io/projected/49daddcc-4224-45ac-b0ba-cb24ddd68dee-kube-api-access-rwmnn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.952592 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.952679 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwmnn\" (UniqueName: \"kubernetes.io/projected/49daddcc-4224-45ac-b0ba-cb24ddd68dee-kube-api-access-rwmnn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.952752 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.960804 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.962826 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:29 crc kubenswrapper[4747]: I1202 17:15:29.972707 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwmnn\" (UniqueName: \"kubernetes.io/projected/49daddcc-4224-45ac-b0ba-cb24ddd68dee-kube-api-access-rwmnn\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-65pqk\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:30 crc kubenswrapper[4747]: I1202 17:15:30.109014 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"
Dec 02 17:15:30 crc kubenswrapper[4747]: I1202 17:15:30.634338 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk"]
Dec 02 17:15:30 crc kubenswrapper[4747]: W1202 17:15:30.637482 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49daddcc_4224_45ac_b0ba_cb24ddd68dee.slice/crio-2400369af786a1a2e5ecb7e5fe9c2a59d83663a1c863ff56bf73550e5bdc5822 WatchSource:0}: Error finding container 2400369af786a1a2e5ecb7e5fe9c2a59d83663a1c863ff56bf73550e5bdc5822: Status 404 returned error can't find the container with id 2400369af786a1a2e5ecb7e5fe9c2a59d83663a1c863ff56bf73550e5bdc5822
Dec 02 17:15:30 crc kubenswrapper[4747]: I1202 17:15:30.688395 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk" event={"ID":"49daddcc-4224-45ac-b0ba-cb24ddd68dee","Type":"ContainerStarted","Data":"2400369af786a1a2e5ecb7e5fe9c2a59d83663a1c863ff56bf73550e5bdc5822"}
Dec 02 17:15:31 crc kubenswrapper[4747]: I1202 17:15:31.698557 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk" event={"ID":"49daddcc-4224-45ac-b0ba-cb24ddd68dee","Type":"ContainerStarted","Data":"f7cbd728c806fd7d61d4e0f78045e35a0b4acbe64ef413a00064e5ef328fb491"}
Dec 02 17:15:31 crc kubenswrapper[4747]: I1202 17:15:31.728045 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk" podStartSLOduration=2.550003052 podStartE2EDuration="2.72802251s" podCreationTimestamp="2025-12-02 17:15:29 +0000 UTC" firstStartedPulling="2025-12-02 17:15:30.64048914 +0000 UTC m=+1961.167377899" lastFinishedPulling="2025-12-02 17:15:30.818508608 +0000 UTC m=+1961.345397357" observedRunningTime="2025-12-02 17:15:31.722094431 +0000 UTC m=+1962.248983200" watchObservedRunningTime="2025-12-02 17:15:31.72802251 +0000 UTC m=+1962.254911269"
Dec 02 17:15:31 crc kubenswrapper[4747]: I1202 17:15:31.760528 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"
Dec 02 17:15:31 crc kubenswrapper[4747]: E1202 17:15:31.760808 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:15:38 crc kubenswrapper[4747]: I1202 17:15:38.553393 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:38 crc kubenswrapper[4747]: I1202 17:15:38.608212 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:38 crc kubenswrapper[4747]: I1202 17:15:38.795601 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9kcch"]
Dec 02 17:15:39 crc kubenswrapper[4747]: I1202 17:15:39.779227 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9kcch" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="registry-server" containerID="cri-o://300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae" gracePeriod=2
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.220518 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.417884 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-utilities\") pod \"959719b4-21e9-4f58-8b18-f69c5126b232\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") "
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.418328 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wgzj\" (UniqueName: \"kubernetes.io/projected/959719b4-21e9-4f58-8b18-f69c5126b232-kube-api-access-7wgzj\") pod \"959719b4-21e9-4f58-8b18-f69c5126b232\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") "
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.418476 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-catalog-content\") pod \"959719b4-21e9-4f58-8b18-f69c5126b232\" (UID: \"959719b4-21e9-4f58-8b18-f69c5126b232\") "
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.418572 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-utilities" (OuterVolumeSpecName: "utilities") pod "959719b4-21e9-4f58-8b18-f69c5126b232" (UID: "959719b4-21e9-4f58-8b18-f69c5126b232"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.419013 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.423928 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/959719b4-21e9-4f58-8b18-f69c5126b232-kube-api-access-7wgzj" (OuterVolumeSpecName: "kube-api-access-7wgzj") pod "959719b4-21e9-4f58-8b18-f69c5126b232" (UID: "959719b4-21e9-4f58-8b18-f69c5126b232"). InnerVolumeSpecName "kube-api-access-7wgzj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.522147 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wgzj\" (UniqueName: \"kubernetes.io/projected/959719b4-21e9-4f58-8b18-f69c5126b232-kube-api-access-7wgzj\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.532276 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "959719b4-21e9-4f58-8b18-f69c5126b232" (UID: "959719b4-21e9-4f58-8b18-f69c5126b232"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.623067 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/959719b4-21e9-4f58-8b18-f69c5126b232-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.789831 4747 generic.go:334] "Generic (PLEG): container finished" podID="959719b4-21e9-4f58-8b18-f69c5126b232" containerID="300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae" exitCode=0
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.789879 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9kcch"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.789878 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9kcch" event={"ID":"959719b4-21e9-4f58-8b18-f69c5126b232","Type":"ContainerDied","Data":"300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae"}
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.789956 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9kcch" event={"ID":"959719b4-21e9-4f58-8b18-f69c5126b232","Type":"ContainerDied","Data":"7ecd6cf4d2598176356f7909a21dba8251d780ee3db731f54c5134ed698b62b1"}
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.789978 4747 scope.go:117] "RemoveContainer" containerID="300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.820698 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9kcch"]
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.823031 4747 scope.go:117] "RemoveContainer" containerID="b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.830323 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9kcch"]
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.846206 4747 scope.go:117] "RemoveContainer" containerID="0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.884540 4747 scope.go:117] "RemoveContainer" containerID="300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae"
Dec 02 17:15:40 crc kubenswrapper[4747]: E1202 17:15:40.885080 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae\": container with ID starting with 300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae not found: ID does not exist" containerID="300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.885141 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae"} err="failed to get container status \"300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae\": rpc error: code = NotFound desc = could not find container \"300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae\": container with ID starting with 300458d9206d30fcafba1c60202023ee2a94ea2593adf017e9b6e1e1c0f015ae not found: ID does not exist"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.885170 4747 scope.go:117] "RemoveContainer" containerID="b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1"
Dec 02 17:15:40 crc kubenswrapper[4747]: E1202 17:15:40.885517 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1\": container with ID starting with b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1 not found: ID does not exist" containerID="b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.885559 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1"} err="failed to get container status \"b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1\": rpc error: code = NotFound desc = could not find container \"b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1\": container with ID starting with b97d6e6ba0f45fd6234dae6aa4191db2014c1309f9ffd95a41fdebf981c0bfb1 not found: ID does not exist"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.885586 4747 scope.go:117] "RemoveContainer" containerID="0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb"
Dec 02 17:15:40 crc kubenswrapper[4747]: E1202 17:15:40.885893 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb\": container with ID starting with 0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb not found: ID does not exist" containerID="0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb"
Dec 02 17:15:40 crc kubenswrapper[4747]: I1202 17:15:40.885941 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb"} err="failed to get container status \"0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb\": rpc error: code = NotFound desc = could not find container \"0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb\": container with ID starting with 0fb772fbd28733a471f120ce830679689dccd8da2155bef0d872590539bd86fb not found: ID does not exist"
Dec 02 17:15:41 crc kubenswrapper[4747]: I1202 17:15:41.057116 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rtm6n"]
Dec 02 17:15:41 crc kubenswrapper[4747]: I1202 17:15:41.076947 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-rtm6n"]
Dec 02 17:15:41 crc kubenswrapper[4747]: I1202 17:15:41.771793 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="343064c0-a3ec-4048-93a1-e76e9f648b17" path="/var/lib/kubelet/pods/343064c0-a3ec-4048-93a1-e76e9f648b17/volumes"
Dec 02 17:15:41 crc kubenswrapper[4747]: I1202 17:15:41.772600 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" path="/var/lib/kubelet/pods/959719b4-21e9-4f58-8b18-f69c5126b232/volumes"
Dec 02 17:15:42 crc kubenswrapper[4747]: I1202 17:15:42.760696 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"
Dec 02 17:15:42 crc kubenswrapper[4747]: E1202 17:15:42.761027 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:15:55 crc kubenswrapper[4747]: I1202 17:15:55.760765 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"
Dec 02 17:15:55 crc kubenswrapper[4747]: E1202 17:15:55.761667 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:16:05 crc kubenswrapper[4747]: I1202 17:16:05.051324 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-djdrn"]
Dec 02 17:16:05 crc kubenswrapper[4747]: I1202 17:16:05.062987 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ktcxd"]
Dec 02 17:16:05 crc kubenswrapper[4747]: I1202 17:16:05.074185 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-djdrn"]
Dec 02 17:16:05 crc kubenswrapper[4747]: I1202 17:16:05.080898 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-ktcxd"]
Dec 02 17:16:05 crc kubenswrapper[4747]: I1202 17:16:05.772033 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ccc4c22-9c49-461c-9c53-404bdc932adc" path="/var/lib/kubelet/pods/5ccc4c22-9c49-461c-9c53-404bdc932adc/volumes"
Dec 02 17:16:05 crc kubenswrapper[4747]: I1202 17:16:05.772834 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d56a0e01-8f6b-409a-96c1-5b67f7fc3528" path="/var/lib/kubelet/pods/d56a0e01-8f6b-409a-96c1-5b67f7fc3528/volumes"
Dec 02 17:16:06 crc kubenswrapper[4747]: I1202 17:16:06.761770 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705"
Dec 02 17:16:07 crc kubenswrapper[4747]: I1202 17:16:07.022707 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"ee94c253b80a66198a4fcfc6f17914c7c8d36b3792d805d2a1e16be971d427a2"}
Dec 02 17:16:07 crc kubenswrapper[4747]: I1202 17:16:07.517569 4747 scope.go:117] "RemoveContainer" containerID="d768cfd35706ae5bbe1820042ae7a2f12fefaf56490d48d37bce34880873c494"
Dec 02 17:16:07 crc kubenswrapper[4747]: I1202 17:16:07.545325 4747 scope.go:117] "RemoveContainer" containerID="f7f238f705e2364781e309764dd179ec75f3f55e29db68c35c61bdf58aece489"
Dec 02 17:16:07 crc kubenswrapper[4747]: I1202 17:16:07.649322 4747 scope.go:117] "RemoveContainer" containerID="4fcdd09b3c35225b33d4e07afa5133d3662e8af492e510d5ad1b047eee63dc58"
Dec 02 17:16:07 crc kubenswrapper[4747]: I1202 17:16:07.703164 4747 scope.go:117] "RemoveContainer" containerID="0710b4cd7ee17dd73912b3aa3d88cf9c606db2cb971f6d3ff6ae704ff3a2e26a"
Dec 02 17:16:07 crc kubenswrapper[4747]: I1202 17:16:07.727484 4747 scope.go:117] "RemoveContainer" containerID="b2a319184355a79fb68f1ff9f62c6fbeed8b9acccb109dcb6d7f12630345fe7b"
Dec 02 17:16:08 crc kubenswrapper[4747]: I1202 17:16:08.036648 4747 generic.go:334] "Generic (PLEG): container finished" podID="49daddcc-4224-45ac-b0ba-cb24ddd68dee" containerID="f7cbd728c806fd7d61d4e0f78045e35a0b4acbe64ef413a00064e5ef328fb491" exitCode=0
Dec 02 17:16:08 crc kubenswrapper[4747]: I1202 17:16:08.036863 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk" event={"ID":"49daddcc-4224-45ac-b0ba-cb24ddd68dee","Type":"ContainerDied","Data":"f7cbd728c806fd7d61d4e0f78045e35a0b4acbe64ef413a00064e5ef328fb491"}
event={"ID":"49daddcc-4224-45ac-b0ba-cb24ddd68dee","Type":"ContainerDied","Data":"f7cbd728c806fd7d61d4e0f78045e35a0b4acbe64ef413a00064e5ef328fb491"} Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.507961 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk" Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.587280 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwmnn\" (UniqueName: \"kubernetes.io/projected/49daddcc-4224-45ac-b0ba-cb24ddd68dee-kube-api-access-rwmnn\") pod \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.587427 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-inventory\") pod \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.587477 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-ssh-key\") pod \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\" (UID: \"49daddcc-4224-45ac-b0ba-cb24ddd68dee\") " Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.593507 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49daddcc-4224-45ac-b0ba-cb24ddd68dee-kube-api-access-rwmnn" (OuterVolumeSpecName: "kube-api-access-rwmnn") pod "49daddcc-4224-45ac-b0ba-cb24ddd68dee" (UID: "49daddcc-4224-45ac-b0ba-cb24ddd68dee"). InnerVolumeSpecName "kube-api-access-rwmnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.615566 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "49daddcc-4224-45ac-b0ba-cb24ddd68dee" (UID: "49daddcc-4224-45ac-b0ba-cb24ddd68dee"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.634726 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-inventory" (OuterVolumeSpecName: "inventory") pod "49daddcc-4224-45ac-b0ba-cb24ddd68dee" (UID: "49daddcc-4224-45ac-b0ba-cb24ddd68dee"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.689433 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.689470 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49daddcc-4224-45ac-b0ba-cb24ddd68dee-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:16:09 crc kubenswrapper[4747]: I1202 17:16:09.689483 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwmnn\" (UniqueName: \"kubernetes.io/projected/49daddcc-4224-45ac-b0ba-cb24ddd68dee-kube-api-access-rwmnn\") on node \"crc\" DevicePath \"\"" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.059846 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk" event={"ID":"49daddcc-4224-45ac-b0ba-cb24ddd68dee","Type":"ContainerDied","Data":"2400369af786a1a2e5ecb7e5fe9c2a59d83663a1c863ff56bf73550e5bdc5822"} Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.060158 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2400369af786a1a2e5ecb7e5fe9c2a59d83663a1c863ff56bf73550e5bdc5822" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.059999 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-65pqk" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.160685 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2"] Dec 02 17:16:10 crc kubenswrapper[4747]: E1202 17:16:10.161061 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="registry-server" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.161077 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="registry-server" Dec 02 17:16:10 crc kubenswrapper[4747]: E1202 17:16:10.161090 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="extract-content" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.161098 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="extract-content" Dec 02 17:16:10 crc kubenswrapper[4747]: E1202 17:16:10.161117 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="extract-utilities" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.161124 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="extract-utilities" Dec 02 17:16:10 crc kubenswrapper[4747]: E1202 17:16:10.161136 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49daddcc-4224-45ac-b0ba-cb24ddd68dee" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.161143 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="49daddcc-4224-45ac-b0ba-cb24ddd68dee" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.161338 4747 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="49daddcc-4224-45ac-b0ba-cb24ddd68dee" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.161355 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="959719b4-21e9-4f58-8b18-f69c5126b232" containerName="registry-server" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.161961 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.166732 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.166757 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.170183 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2"] Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.166834 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.167014 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.202208 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.202260 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.202339 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxh7r\" (UniqueName: \"kubernetes.io/projected/7e920075-6847-4768-b952-8a76f23acddd-kube-api-access-jxh7r\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.303610 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxh7r\" (UniqueName: \"kubernetes.io/projected/7e920075-6847-4768-b952-8a76f23acddd-kube-api-access-jxh7r\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.303826 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-ssh-key\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.303870 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.308404 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.308788 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.325091 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxh7r\" (UniqueName: \"kubernetes.io/projected/7e920075-6847-4768-b952-8a76f23acddd-kube-api-access-jxh7r\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:10 crc kubenswrapper[4747]: I1202 17:16:10.490187 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:16:11 crc kubenswrapper[4747]: I1202 17:16:11.027098 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2"] Dec 02 17:16:11 crc kubenswrapper[4747]: W1202 17:16:11.037619 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e920075_6847_4768_b952_8a76f23acddd.slice/crio-1ce7cdc3877bccf6e5310af8316c22595654095feadacbbd3430ecd70907543a WatchSource:0}: Error finding container 1ce7cdc3877bccf6e5310af8316c22595654095feadacbbd3430ecd70907543a: Status 404 returned error can't find the container with id 1ce7cdc3877bccf6e5310af8316c22595654095feadacbbd3430ecd70907543a Dec 02 17:16:11 crc kubenswrapper[4747]: I1202 17:16:11.072246 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" event={"ID":"7e920075-6847-4768-b952-8a76f23acddd","Type":"ContainerStarted","Data":"1ce7cdc3877bccf6e5310af8316c22595654095feadacbbd3430ecd70907543a"} Dec 02 17:16:12 crc kubenswrapper[4747]: I1202 17:16:12.090168 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" event={"ID":"7e920075-6847-4768-b952-8a76f23acddd","Type":"ContainerStarted","Data":"88a4ae9f6982fbe0e8a10851ee81d3188264702e717e6986298d34fddcb6a47b"} Dec 02 17:16:12 crc kubenswrapper[4747]: I1202 17:16:12.110404 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" podStartSLOduration=1.94292184 podStartE2EDuration="2.110380879s" podCreationTimestamp="2025-12-02 17:16:10 +0000 UTC" firstStartedPulling="2025-12-02 17:16:11.040870392 +0000 UTC m=+2001.567759141" lastFinishedPulling="2025-12-02 17:16:11.208329421 +0000 UTC m=+2001.735218180" observedRunningTime="2025-12-02 17:16:12.110201634 +0000 UTC m=+2002.637090393" watchObservedRunningTime="2025-12-02 17:16:12.110380879 +0000 UTC m=+2002.637269628" Dec 02 17:16:49 crc kubenswrapper[4747]: I1202 17:16:49.050529 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-zlcpr"] Dec 02 17:16:49 crc kubenswrapper[4747]: I1202 17:16:49.060360 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-zlcpr"] Dec 02 17:16:49 crc kubenswrapper[4747]: I1202 17:16:49.775393 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5c1a898-e20a-4ac8-bf8f-d798a2298f57" path="/var/lib/kubelet/pods/b5c1a898-e20a-4ac8-bf8f-d798a2298f57/volumes" Dec 02 17:17:00 crc kubenswrapper[4747]: I1202 17:17:00.873682 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-787hj"] Dec 02 17:17:00 crc kubenswrapper[4747]: I1202 17:17:00.878209 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:00 crc kubenswrapper[4747]: I1202 17:17:00.886274 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-787hj"] Dec 02 17:17:00 crc kubenswrapper[4747]: I1202 17:17:00.950056 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-catalog-content\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:00 crc kubenswrapper[4747]: I1202 17:17:00.950110 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-utilities\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:00 crc kubenswrapper[4747]: I1202 17:17:00.950453 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t852z\" (UniqueName: \"kubernetes.io/projected/9fac1678-71a0-42df-8ee8-e99d48e38666-kube-api-access-t852z\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.051822 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t852z\" (UniqueName: \"kubernetes.io/projected/9fac1678-71a0-42df-8ee8-e99d48e38666-kube-api-access-t852z\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.051938 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-catalog-content\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.051971 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-utilities\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.052541 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-utilities\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.052998 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-catalog-content\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.080503 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-t852z\" (UniqueName: \"kubernetes.io/projected/9fac1678-71a0-42df-8ee8-e99d48e38666-kube-api-access-t852z\") pod \"redhat-marketplace-787hj\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.200760 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:01 crc kubenswrapper[4747]: I1202 17:17:01.703693 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-787hj"] Dec 02 17:17:02 crc kubenswrapper[4747]: I1202 17:17:02.568030 4747 generic.go:334] "Generic (PLEG): container finished" podID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerID="3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214" exitCode=0 Dec 02 17:17:02 crc kubenswrapper[4747]: I1202 17:17:02.568335 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-787hj" event={"ID":"9fac1678-71a0-42df-8ee8-e99d48e38666","Type":"ContainerDied","Data":"3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214"} Dec 02 17:17:02 crc kubenswrapper[4747]: I1202 17:17:02.568360 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-787hj" event={"ID":"9fac1678-71a0-42df-8ee8-e99d48e38666","Type":"ContainerStarted","Data":"f6b8c08f5f569d6e434e87ec9c01cbbab56580a7044bc14cc96645b2258e1a31"} Dec 02 17:17:04 crc kubenswrapper[4747]: I1202 17:17:04.589989 4747 generic.go:334] "Generic (PLEG): container finished" podID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerID="e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8" exitCode=0 Dec 02 17:17:04 crc kubenswrapper[4747]: I1202 17:17:04.590100 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-787hj" event={"ID":"9fac1678-71a0-42df-8ee8-e99d48e38666","Type":"ContainerDied","Data":"e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8"} Dec 02 17:17:05 crc kubenswrapper[4747]: I1202 17:17:05.602657 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-787hj" event={"ID":"9fac1678-71a0-42df-8ee8-e99d48e38666","Type":"ContainerStarted","Data":"49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28"} Dec 02 17:17:05 crc kubenswrapper[4747]: I1202 17:17:05.618956 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-787hj" podStartSLOduration=3.099839918 podStartE2EDuration="5.618938106s" podCreationTimestamp="2025-12-02 17:17:00 +0000 UTC" firstStartedPulling="2025-12-02 17:17:02.570294077 +0000 UTC m=+2053.097182826" lastFinishedPulling="2025-12-02 17:17:05.089392265 +0000 UTC m=+2055.616281014" observedRunningTime="2025-12-02 17:17:05.616801365 +0000 UTC m=+2056.143690114" watchObservedRunningTime="2025-12-02 17:17:05.618938106 +0000 UTC m=+2056.145826855" Dec 02 17:17:06 crc kubenswrapper[4747]: I1202 17:17:06.614107 4747 generic.go:334] "Generic (PLEG): container finished" podID="7e920075-6847-4768-b952-8a76f23acddd" containerID="88a4ae9f6982fbe0e8a10851ee81d3188264702e717e6986298d34fddcb6a47b" exitCode=0 Dec 02 17:17:06 crc kubenswrapper[4747]: I1202 17:17:06.614160 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" 
event={"ID":"7e920075-6847-4768-b952-8a76f23acddd","Type":"ContainerDied","Data":"88a4ae9f6982fbe0e8a10851ee81d3188264702e717e6986298d34fddcb6a47b"} Dec 02 17:17:07 crc kubenswrapper[4747]: I1202 17:17:07.909094 4747 scope.go:117] "RemoveContainer" containerID="c18cbe7b76679569e3b73dfeb3211e67d0dc62590f2ad71570275bc3b48bad7e" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.126180 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.202857 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxh7r\" (UniqueName: \"kubernetes.io/projected/7e920075-6847-4768-b952-8a76f23acddd-kube-api-access-jxh7r\") pod \"7e920075-6847-4768-b952-8a76f23acddd\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.203111 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-inventory\") pod \"7e920075-6847-4768-b952-8a76f23acddd\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.203191 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-ssh-key\") pod \"7e920075-6847-4768-b952-8a76f23acddd\" (UID: \"7e920075-6847-4768-b952-8a76f23acddd\") " Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.208604 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e920075-6847-4768-b952-8a76f23acddd-kube-api-access-jxh7r" (OuterVolumeSpecName: "kube-api-access-jxh7r") pod "7e920075-6847-4768-b952-8a76f23acddd" (UID: "7e920075-6847-4768-b952-8a76f23acddd"). InnerVolumeSpecName "kube-api-access-jxh7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.231800 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-inventory" (OuterVolumeSpecName: "inventory") pod "7e920075-6847-4768-b952-8a76f23acddd" (UID: "7e920075-6847-4768-b952-8a76f23acddd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.238523 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7e920075-6847-4768-b952-8a76f23acddd" (UID: "7e920075-6847-4768-b952-8a76f23acddd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.305755 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.305817 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e920075-6847-4768-b952-8a76f23acddd-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.305832 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxh7r\" (UniqueName: \"kubernetes.io/projected/7e920075-6847-4768-b952-8a76f23acddd-kube-api-access-jxh7r\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.635283 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" event={"ID":"7e920075-6847-4768-b952-8a76f23acddd","Type":"ContainerDied","Data":"1ce7cdc3877bccf6e5310af8316c22595654095feadacbbd3430ecd70907543a"} Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.635324 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ce7cdc3877bccf6e5310af8316c22595654095feadacbbd3430ecd70907543a" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.635373 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.773570 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-zfq7s"] Dec 02 17:17:08 crc kubenswrapper[4747]: E1202 17:17:08.774230 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e920075-6847-4768-b952-8a76f23acddd" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.774259 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e920075-6847-4768-b952-8a76f23acddd" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.774645 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e920075-6847-4768-b952-8a76f23acddd" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.775773 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.777947 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.778299 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.778404 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.779105 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.783717 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-zfq7s"] Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.815707 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.816501 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fs84\" (UniqueName: \"kubernetes.io/projected/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-kube-api-access-4fs84\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.816628 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.918233 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.918365 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fs84\" (UniqueName: \"kubernetes.io/projected/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-kube-api-access-4fs84\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.918394 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc 
kubenswrapper[4747]: I1202 17:17:08.923736 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.924738 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:08 crc kubenswrapper[4747]: I1202 17:17:08.936340 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fs84\" (UniqueName: \"kubernetes.io/projected/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-kube-api-access-4fs84\") pod \"ssh-known-hosts-edpm-deployment-zfq7s\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:09 crc kubenswrapper[4747]: I1202 17:17:09.095374 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:09 crc kubenswrapper[4747]: I1202 17:17:09.615524 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-zfq7s"] Dec 02 17:17:09 crc kubenswrapper[4747]: I1202 17:17:09.648172 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" event={"ID":"ee78ab8c-165e-4f44-9b66-d74f7aa4d397","Type":"ContainerStarted","Data":"6cc456ffcafbbaaa1145220351944616327d401ea0b74a5d674ea6ed68fa18ae"} Dec 02 17:17:10 crc kubenswrapper[4747]: I1202 17:17:10.660361 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" event={"ID":"ee78ab8c-165e-4f44-9b66-d74f7aa4d397","Type":"ContainerStarted","Data":"5eeb541ae2ab36e5b812d2b06bc8acf86c654e62d083371bf11e3000ec2643bd"} Dec 02 17:17:10 crc kubenswrapper[4747]: I1202 17:17:10.679791 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" podStartSLOduration=2.354764024 podStartE2EDuration="2.679771629s" podCreationTimestamp="2025-12-02 17:17:08 +0000 UTC" firstStartedPulling="2025-12-02 17:17:09.621240314 +0000 UTC m=+2060.148129063" lastFinishedPulling="2025-12-02 17:17:09.946247879 +0000 UTC m=+2060.473136668" observedRunningTime="2025-12-02 17:17:10.676406443 +0000 UTC m=+2061.203295192" watchObservedRunningTime="2025-12-02 17:17:10.679771629 +0000 UTC m=+2061.206660378" Dec 02 17:17:11 crc kubenswrapper[4747]: I1202 17:17:11.203635 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:11 crc kubenswrapper[4747]: I1202 17:17:11.204042 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:11 crc kubenswrapper[4747]: I1202 17:17:11.253569 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:11 crc kubenswrapper[4747]: I1202 17:17:11.720523 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:11 crc kubenswrapper[4747]: I1202 17:17:11.771071 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-787hj"] Dec 02 17:17:13 crc kubenswrapper[4747]: I1202 17:17:13.688653 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-787hj" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="registry-server" containerID="cri-o://49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28" gracePeriod=2 Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.177387 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.229245 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t852z\" (UniqueName: \"kubernetes.io/projected/9fac1678-71a0-42df-8ee8-e99d48e38666-kube-api-access-t852z\") pod \"9fac1678-71a0-42df-8ee8-e99d48e38666\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.229325 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-catalog-content\") pod \"9fac1678-71a0-42df-8ee8-e99d48e38666\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.229352 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-utilities\") pod \"9fac1678-71a0-42df-8ee8-e99d48e38666\" (UID: \"9fac1678-71a0-42df-8ee8-e99d48e38666\") " Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.230627 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-utilities" (OuterVolumeSpecName: "utilities") pod "9fac1678-71a0-42df-8ee8-e99d48e38666" (UID: "9fac1678-71a0-42df-8ee8-e99d48e38666"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.236797 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fac1678-71a0-42df-8ee8-e99d48e38666-kube-api-access-t852z" (OuterVolumeSpecName: "kube-api-access-t852z") pod "9fac1678-71a0-42df-8ee8-e99d48e38666" (UID: "9fac1678-71a0-42df-8ee8-e99d48e38666"). InnerVolumeSpecName "kube-api-access-t852z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.252151 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9fac1678-71a0-42df-8ee8-e99d48e38666" (UID: "9fac1678-71a0-42df-8ee8-e99d48e38666"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.331383 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t852z\" (UniqueName: \"kubernetes.io/projected/9fac1678-71a0-42df-8ee8-e99d48e38666-kube-api-access-t852z\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.331422 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.331431 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fac1678-71a0-42df-8ee8-e99d48e38666-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.702729 4747 generic.go:334] "Generic (PLEG): container finished" podID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerID="49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28" exitCode=0 Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.702774 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-787hj" event={"ID":"9fac1678-71a0-42df-8ee8-e99d48e38666","Type":"ContainerDied","Data":"49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28"} Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.702788 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-787hj" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.702804 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-787hj" event={"ID":"9fac1678-71a0-42df-8ee8-e99d48e38666","Type":"ContainerDied","Data":"f6b8c08f5f569d6e434e87ec9c01cbbab56580a7044bc14cc96645b2258e1a31"} Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.702822 4747 scope.go:117] "RemoveContainer" containerID="49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.736979 4747 scope.go:117] "RemoveContainer" containerID="e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.755743 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-787hj"] Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.759842 4747 scope.go:117] "RemoveContainer" containerID="3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.765212 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-787hj"] Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.820173 4747 scope.go:117] "RemoveContainer" containerID="49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28" Dec 02 17:17:14 crc kubenswrapper[4747]: E1202 17:17:14.821371 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28\": container with ID starting with 49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28 not found: ID does not exist" containerID="49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.821420 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28"} err="failed to get container status \"49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28\": rpc error: code = NotFound desc = could not find container \"49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28\": container with ID starting with 49e216387faa68bca5d82e748bd8fcd488cb390c2c613653aa6591c588cc5a28 not found: ID does not exist" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.821447 4747 scope.go:117] "RemoveContainer" containerID="e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8" Dec 02 17:17:14 crc kubenswrapper[4747]: E1202 17:17:14.821950 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8\": container with ID starting with e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8 not found: ID does not exist" containerID="e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.821978 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8"} err="failed to get container status \"e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8\": rpc error: code = NotFound desc = could not find container \"e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8\": container with ID starting with e855da3e041b3e79b6e184f22ad0bcf099a174319ffcf661112a507c4b06bfc8 not found: ID does not exist" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.821996 4747 scope.go:117] "RemoveContainer" containerID="3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214" Dec 02 17:17:14 crc kubenswrapper[4747]: E1202 17:17:14.822334 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214\": container with ID starting with 3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214 not found: ID does not exist" containerID="3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214" Dec 02 17:17:14 crc kubenswrapper[4747]: I1202 17:17:14.822360 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214"} err="failed to get container status \"3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214\": rpc error: code = NotFound desc = could not find container \"3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214\": container with ID starting with 3aa89a04ad5bb575f47bbcca6af2ebac874145031d386d5aa43f62a808184214 not found: ID does not exist" Dec 02 17:17:15 crc kubenswrapper[4747]: I1202 17:17:15.778573 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" path="/var/lib/kubelet/pods/9fac1678-71a0-42df-8ee8-e99d48e38666/volumes" Dec 02 17:17:17 crc kubenswrapper[4747]: I1202 17:17:17.739119 4747 generic.go:334] "Generic (PLEG): container finished" podID="ee78ab8c-165e-4f44-9b66-d74f7aa4d397" containerID="5eeb541ae2ab36e5b812d2b06bc8acf86c654e62d083371bf11e3000ec2643bd" exitCode=0 Dec 02 17:17:17 crc kubenswrapper[4747]: I1202 
17:17:17.739259 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" event={"ID":"ee78ab8c-165e-4f44-9b66-d74f7aa4d397","Type":"ContainerDied","Data":"5eeb541ae2ab36e5b812d2b06bc8acf86c654e62d083371bf11e3000ec2643bd"} Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.139554 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.236711 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-ssh-key-openstack-edpm-ipam\") pod \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.237186 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fs84\" (UniqueName: \"kubernetes.io/projected/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-kube-api-access-4fs84\") pod \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.237298 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-inventory-0\") pod \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\" (UID: \"ee78ab8c-165e-4f44-9b66-d74f7aa4d397\") " Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.241958 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-kube-api-access-4fs84" (OuterVolumeSpecName: "kube-api-access-4fs84") pod "ee78ab8c-165e-4f44-9b66-d74f7aa4d397" (UID: "ee78ab8c-165e-4f44-9b66-d74f7aa4d397"). InnerVolumeSpecName "kube-api-access-4fs84". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.263801 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ee78ab8c-165e-4f44-9b66-d74f7aa4d397" (UID: "ee78ab8c-165e-4f44-9b66-d74f7aa4d397"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.265059 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "ee78ab8c-165e-4f44-9b66-d74f7aa4d397" (UID: "ee78ab8c-165e-4f44-9b66-d74f7aa4d397"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.340145 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.340193 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fs84\" (UniqueName: \"kubernetes.io/projected/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-kube-api-access-4fs84\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.340207 4747 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee78ab8c-165e-4f44-9b66-d74f7aa4d397-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.759107 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" event={"ID":"ee78ab8c-165e-4f44-9b66-d74f7aa4d397","Type":"ContainerDied","Data":"6cc456ffcafbbaaa1145220351944616327d401ea0b74a5d674ea6ed68fa18ae"} Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.759173 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cc456ffcafbbaaa1145220351944616327d401ea0b74a5d674ea6ed68fa18ae" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.759189 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-zfq7s" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.854338 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt"] Dec 02 17:17:19 crc kubenswrapper[4747]: E1202 17:17:19.855097 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="extract-content" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.860228 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="extract-content" Dec 02 17:17:19 crc kubenswrapper[4747]: E1202 17:17:19.860355 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="registry-server" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.860421 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="registry-server" Dec 02 17:17:19 crc kubenswrapper[4747]: E1202 17:17:19.860564 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee78ab8c-165e-4f44-9b66-d74f7aa4d397" containerName="ssh-known-hosts-edpm-deployment" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.860631 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee78ab8c-165e-4f44-9b66-d74f7aa4d397" containerName="ssh-known-hosts-edpm-deployment" Dec 02 17:17:19 crc kubenswrapper[4747]: E1202 17:17:19.860709 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="extract-utilities" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.860771 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="extract-utilities" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.861212 4747 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="9fac1678-71a0-42df-8ee8-e99d48e38666" containerName="registry-server" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.861329 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee78ab8c-165e-4f44-9b66-d74f7aa4d397" containerName="ssh-known-hosts-edpm-deployment" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.862526 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.865881 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.866897 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.866959 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.869276 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.881040 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt"] Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.953447 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.953802 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:19 crc kubenswrapper[4747]: I1202 17:17:19.954033 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dbst\" (UniqueName: \"kubernetes.io/projected/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-kube-api-access-7dbst\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.056034 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.056069 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.056121 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dbst\" (UniqueName: \"kubernetes.io/projected/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-kube-api-access-7dbst\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.059673 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.061482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.075885 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dbst\" (UniqueName: \"kubernetes.io/projected/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-kube-api-access-7dbst\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-7sddt\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.196725 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.717509 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.719572 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt"] Dec 02 17:17:20 crc kubenswrapper[4747]: I1202 17:17:20.767806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" event={"ID":"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790","Type":"ContainerStarted","Data":"7b3c6dac403bbecc327ba0773b7410c984813ca208a3c215ae481e0b9df10ea8"} Dec 02 17:17:21 crc kubenswrapper[4747]: I1202 17:17:21.779300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" event={"ID":"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790","Type":"ContainerStarted","Data":"da359d2c30f8a089b5ffa1cd7acd6e97c6bbde081d3ce0e9dd1cfc2f9e4a72cc"} Dec 02 17:17:21 crc kubenswrapper[4747]: I1202 17:17:21.807177 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" podStartSLOduration=2.650699469 podStartE2EDuration="2.807154675s" podCreationTimestamp="2025-12-02 17:17:19 +0000 UTC" firstStartedPulling="2025-12-02 17:17:20.717292028 +0000 UTC m=+2071.244180777" lastFinishedPulling="2025-12-02 17:17:20.873747234 +0000 UTC m=+2071.400635983" observedRunningTime="2025-12-02 17:17:21.800486395 +0000 UTC m=+2072.327375164" watchObservedRunningTime="2025-12-02 17:17:21.807154675 +0000 UTC m=+2072.334043424" Dec 02 17:17:28 crc kubenswrapper[4747]: I1202 17:17:28.837340 4747 generic.go:334] "Generic (PLEG): container finished" podID="4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790" containerID="da359d2c30f8a089b5ffa1cd7acd6e97c6bbde081d3ce0e9dd1cfc2f9e4a72cc" exitCode=0 Dec 02 17:17:28 crc kubenswrapper[4747]: I1202 17:17:28.837423 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" event={"ID":"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790","Type":"ContainerDied","Data":"da359d2c30f8a089b5ffa1cd7acd6e97c6bbde081d3ce0e9dd1cfc2f9e4a72cc"} Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.209554 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.365450 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dbst\" (UniqueName: \"kubernetes.io/projected/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-kube-api-access-7dbst\") pod \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.365586 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-ssh-key\") pod \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.365628 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-inventory\") pod \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\" (UID: \"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790\") " Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.372099 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-kube-api-access-7dbst" (OuterVolumeSpecName: "kube-api-access-7dbst") pod "4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790" (UID: "4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790"). InnerVolumeSpecName "kube-api-access-7dbst". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.395874 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790" (UID: "4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.401782 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-inventory" (OuterVolumeSpecName: "inventory") pod "4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790" (UID: "4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.468310 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dbst\" (UniqueName: \"kubernetes.io/projected/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-kube-api-access-7dbst\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.468346 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.468357 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.859773 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" event={"ID":"4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790","Type":"ContainerDied","Data":"7b3c6dac403bbecc327ba0773b7410c984813ca208a3c215ae481e0b9df10ea8"} Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.860193 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b3c6dac403bbecc327ba0773b7410c984813ca208a3c215ae481e0b9df10ea8" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.860264 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-7sddt" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.923030 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv"] Dec 02 17:17:30 crc kubenswrapper[4747]: E1202 17:17:30.923520 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.923544 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.923750 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.924473 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.927009 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.929615 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.934694 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.934750 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:17:30 crc kubenswrapper[4747]: I1202 17:17:30.936266 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv"] Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.080023 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.080216 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.080272 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9vxt\" (UniqueName: \"kubernetes.io/projected/b7b40fdd-ba3d-48fa-b374-279795ee9a44-kube-api-access-r9vxt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.181931 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.182002 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9vxt\" (UniqueName: \"kubernetes.io/projected/b7b40fdd-ba3d-48fa-b374-279795ee9a44-kube-api-access-r9vxt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.182097 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: 
\"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.186655 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.191996 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.204257 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9vxt\" (UniqueName: \"kubernetes.io/projected/b7b40fdd-ba3d-48fa-b374-279795ee9a44-kube-api-access-r9vxt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.244589 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.750805 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv"] Dec 02 17:17:31 crc kubenswrapper[4747]: I1202 17:17:31.868262 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" event={"ID":"b7b40fdd-ba3d-48fa-b374-279795ee9a44","Type":"ContainerStarted","Data":"143b7c503f5adbcdbe4a70694125a4c0f3aa004a2a720d1ea3d80e2fa967d945"} Dec 02 17:17:32 crc kubenswrapper[4747]: I1202 17:17:32.880450 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" event={"ID":"b7b40fdd-ba3d-48fa-b374-279795ee9a44","Type":"ContainerStarted","Data":"b8372b6d45346943e43ccc4e26df2db738267609263d37effb2c4251980159ea"} Dec 02 17:17:32 crc kubenswrapper[4747]: I1202 17:17:32.902315 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" podStartSLOduration=2.639500765 podStartE2EDuration="2.902283823s" podCreationTimestamp="2025-12-02 17:17:30 +0000 UTC" firstStartedPulling="2025-12-02 17:17:31.758202911 +0000 UTC m=+2082.285091660" lastFinishedPulling="2025-12-02 17:17:32.020985969 +0000 UTC m=+2082.547874718" observedRunningTime="2025-12-02 17:17:32.898225938 +0000 UTC m=+2083.425114727" watchObservedRunningTime="2025-12-02 17:17:32.902283823 +0000 UTC m=+2083.429172612" Dec 02 17:17:41 crc kubenswrapper[4747]: I1202 17:17:41.961012 4747 generic.go:334] "Generic (PLEG): container finished" podID="b7b40fdd-ba3d-48fa-b374-279795ee9a44" containerID="b8372b6d45346943e43ccc4e26df2db738267609263d37effb2c4251980159ea" exitCode=0 Dec 02 17:17:41 crc kubenswrapper[4747]: I1202 17:17:41.961090 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" 
event={"ID":"b7b40fdd-ba3d-48fa-b374-279795ee9a44","Type":"ContainerDied","Data":"b8372b6d45346943e43ccc4e26df2db738267609263d37effb2c4251980159ea"} Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.394737 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.517095 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-ssh-key\") pod \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.517206 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-inventory\") pod \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.517244 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9vxt\" (UniqueName: \"kubernetes.io/projected/b7b40fdd-ba3d-48fa-b374-279795ee9a44-kube-api-access-r9vxt\") pod \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\" (UID: \"b7b40fdd-ba3d-48fa-b374-279795ee9a44\") " Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.523310 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7b40fdd-ba3d-48fa-b374-279795ee9a44-kube-api-access-r9vxt" (OuterVolumeSpecName: "kube-api-access-r9vxt") pod "b7b40fdd-ba3d-48fa-b374-279795ee9a44" (UID: "b7b40fdd-ba3d-48fa-b374-279795ee9a44"). InnerVolumeSpecName "kube-api-access-r9vxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.545077 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-inventory" (OuterVolumeSpecName: "inventory") pod "b7b40fdd-ba3d-48fa-b374-279795ee9a44" (UID: "b7b40fdd-ba3d-48fa-b374-279795ee9a44"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.545835 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b7b40fdd-ba3d-48fa-b374-279795ee9a44" (UID: "b7b40fdd-ba3d-48fa-b374-279795ee9a44"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.619898 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.619979 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9vxt\" (UniqueName: \"kubernetes.io/projected/b7b40fdd-ba3d-48fa-b374-279795ee9a44-kube-api-access-r9vxt\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.619990 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b7b40fdd-ba3d-48fa-b374-279795ee9a44-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.987227 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" event={"ID":"b7b40fdd-ba3d-48fa-b374-279795ee9a44","Type":"ContainerDied","Data":"143b7c503f5adbcdbe4a70694125a4c0f3aa004a2a720d1ea3d80e2fa967d945"} Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.987566 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="143b7c503f5adbcdbe4a70694125a4c0f3aa004a2a720d1ea3d80e2fa967d945" Dec 02 17:17:43 crc kubenswrapper[4747]: I1202 17:17:43.987314 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.101118 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz"] Dec 02 17:17:44 crc kubenswrapper[4747]: E1202 17:17:44.101570 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b40fdd-ba3d-48fa-b374-279795ee9a44" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.101589 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b40fdd-ba3d-48fa-b374-279795ee9a44" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.101798 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7b40fdd-ba3d-48fa-b374-279795ee9a44" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.102491 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.105387 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.106819 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.106923 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.107127 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.107286 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.107289 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.107441 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.107487 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.115082 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz"] Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.234861 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235016 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235217 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235245 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235299 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235319 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235350 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235498 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235696 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235790 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235815 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5tdz\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-kube-api-access-j5tdz\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235895 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.235987 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.236029 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.338309 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.338421 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.338461 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5tdz\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-kube-api-access-j5tdz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.338538 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.339549 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.339680 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.339745 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.339849 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.340147 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.340847 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.341142 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.341205 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-inventory\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.341271 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.341360 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.343796 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.344501 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.344536 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.345612 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.345662 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.347316 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.349287 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.350778 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.352513 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.353056 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.353249 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.355614 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.357353 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.371087 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5tdz\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-kube-api-access-j5tdz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.428614 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:17:44 crc kubenswrapper[4747]: I1202 17:17:44.989825 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz"] Dec 02 17:17:46 crc kubenswrapper[4747]: I1202 17:17:46.014802 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" event={"ID":"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1","Type":"ContainerStarted","Data":"9207192310da1302bd531baf4445208b75d74c902cb7f4c9ea89372fee630be9"} Dec 02 17:17:46 crc kubenswrapper[4747]: I1202 17:17:46.015151 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" event={"ID":"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1","Type":"ContainerStarted","Data":"a042d1389ea51afbbaf3aecc9d7fc89e9af2bcd6d9416dc457f8da30340ac4a9"} Dec 02 17:17:46 crc kubenswrapper[4747]: I1202 17:17:46.036844 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" podStartSLOduration=1.862342342 podStartE2EDuration="2.036821414s" podCreationTimestamp="2025-12-02 17:17:44 +0000 UTC" firstStartedPulling="2025-12-02 17:17:44.994712056 +0000 UTC m=+2095.521600805" lastFinishedPulling="2025-12-02 17:17:45.169191128 +0000 UTC m=+2095.696079877" observedRunningTime="2025-12-02 17:17:46.033768857 +0000 UTC m=+2096.560657616" watchObservedRunningTime="2025-12-02 17:17:46.036821414 +0000 UTC m=+2096.563710173" Dec 02 17:18:24 crc kubenswrapper[4747]: I1202 17:18:24.416002 4747 generic.go:334] "Generic (PLEG): container finished" podID="a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" containerID="9207192310da1302bd531baf4445208b75d74c902cb7f4c9ea89372fee630be9" exitCode=0 Dec 02 17:18:24 crc kubenswrapper[4747]: I1202 17:18:24.416069 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" event={"ID":"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1","Type":"ContainerDied","Data":"9207192310da1302bd531baf4445208b75d74c902cb7f4c9ea89372fee630be9"} Dec 02 17:18:25 crc kubenswrapper[4747]: I1202 17:18:25.894079 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020223 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-nova-combined-ca-bundle\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020300 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-bootstrap-combined-ca-bundle\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020333 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-repo-setup-combined-ca-bundle\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020381 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-telemetry-combined-ca-bundle\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020495 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-neutron-metadata-combined-ca-bundle\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020564 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020597 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ssh-key\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020634 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ovn-combined-ca-bundle\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020727 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 
17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020781 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-ovn-default-certs-0\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020836 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5tdz\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-kube-api-access-j5tdz\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.020888 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-inventory\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.021030 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-libvirt-combined-ca-bundle\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.021158 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\" (UID: \"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1\") " Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.029380 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.032810 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.033192 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.035248 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.037028 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.037063 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.037254 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.037506 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.038022 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.038304 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.049251 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.053978 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-kube-api-access-j5tdz" (OuterVolumeSpecName: "kube-api-access-j5tdz") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "kube-api-access-j5tdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.072657 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-inventory" (OuterVolumeSpecName: "inventory") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.098290 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" (UID: "a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124361 4747 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124437 4747 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124466 4747 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124493 4747 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124526 4747 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124553 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124581 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124606 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124635 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124696 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124725 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5tdz\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-kube-api-access-j5tdz\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124749 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124786 4747 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.124813 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.441510 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" event={"ID":"a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1","Type":"ContainerDied","Data":"a042d1389ea51afbbaf3aecc9d7fc89e9af2bcd6d9416dc457f8da30340ac4a9"} Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.441546 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a042d1389ea51afbbaf3aecc9d7fc89e9af2bcd6d9416dc457f8da30340ac4a9" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.441944 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.562195 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b"] Dec 02 17:18:26 crc kubenswrapper[4747]: E1202 17:18:26.562602 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.562621 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.562840 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.563499 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.566642 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.566981 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.568086 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.568394 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.568559 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.575387 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b"] Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.634130 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.634194 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gzkk\" (UniqueName: \"kubernetes.io/projected/5a3d9657-8162-4edb-88bc-af303d558b2c-kube-api-access-2gzkk\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.634219 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.634305 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.634559 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5a3d9657-8162-4edb-88bc-af303d558b2c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.736118 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5a3d9657-8162-4edb-88bc-af303d558b2c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.736253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.736279 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gzkk\" (UniqueName: \"kubernetes.io/projected/5a3d9657-8162-4edb-88bc-af303d558b2c-kube-api-access-2gzkk\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.736296 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.736317 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.737057 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5a3d9657-8162-4edb-88bc-af303d558b2c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 
17:18:26.742099 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.742308 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.743256 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.752970 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gzkk\" (UniqueName: \"kubernetes.io/projected/5a3d9657-8162-4edb-88bc-af303d558b2c-kube-api-access-2gzkk\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rml7b\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:26 crc kubenswrapper[4747]: I1202 17:18:26.888665 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:18:27 crc kubenswrapper[4747]: I1202 17:18:27.898187 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b"] Dec 02 17:18:28 crc kubenswrapper[4747]: I1202 17:18:28.469011 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" event={"ID":"5a3d9657-8162-4edb-88bc-af303d558b2c","Type":"ContainerStarted","Data":"cbdd48efb73f09c4e1bc070a2ef75a848790b072e2f8adf52bb4ef6350a9815b"} Dec 02 17:18:28 crc kubenswrapper[4747]: I1202 17:18:28.469319 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" event={"ID":"5a3d9657-8162-4edb-88bc-af303d558b2c","Type":"ContainerStarted","Data":"733f0cc6f606f7ea594ef54879ce06571f3aa33dc2a342e14a80a114aadca482"} Dec 02 17:18:28 crc kubenswrapper[4747]: I1202 17:18:28.492062 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" podStartSLOduration=2.305885222 podStartE2EDuration="2.492044847s" podCreationTimestamp="2025-12-02 17:18:26 +0000 UTC" firstStartedPulling="2025-12-02 17:18:27.906260564 +0000 UTC m=+2138.433149313" lastFinishedPulling="2025-12-02 17:18:28.092420149 +0000 UTC m=+2138.619308938" observedRunningTime="2025-12-02 17:18:28.489125904 +0000 UTC m=+2139.016014653" watchObservedRunningTime="2025-12-02 17:18:28.492044847 +0000 UTC m=+2139.018933596" Dec 02 17:18:31 crc kubenswrapper[4747]: I1202 17:18:31.794688 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:18:31 crc kubenswrapper[4747]: I1202 17:18:31.795230 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:19:01 crc kubenswrapper[4747]: I1202 17:19:01.795823 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:19:01 crc kubenswrapper[4747]: I1202 17:19:01.796386 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:19:30 crc kubenswrapper[4747]: I1202 17:19:30.265325 4747 generic.go:334] "Generic (PLEG): container finished" podID="5a3d9657-8162-4edb-88bc-af303d558b2c" containerID="cbdd48efb73f09c4e1bc070a2ef75a848790b072e2f8adf52bb4ef6350a9815b" exitCode=0 Dec 02 17:19:30 crc kubenswrapper[4747]: I1202 17:19:30.265477 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" event={"ID":"5a3d9657-8162-4edb-88bc-af303d558b2c","Type":"ContainerDied","Data":"cbdd48efb73f09c4e1bc070a2ef75a848790b072e2f8adf52bb4ef6350a9815b"} Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.721933 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.795855 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.795967 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.796025 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.796921 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ee94c253b80a66198a4fcfc6f17914c7c8d36b3792d805d2a1e16be971d427a2"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.796984 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://ee94c253b80a66198a4fcfc6f17914c7c8d36b3792d805d2a1e16be971d427a2" gracePeriod=600 Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.868256 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5a3d9657-8162-4edb-88bc-af303d558b2c-ovncontroller-config-0\") pod \"5a3d9657-8162-4edb-88bc-af303d558b2c\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.868326 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ssh-key\") pod \"5a3d9657-8162-4edb-88bc-af303d558b2c\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.868379 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gzkk\" (UniqueName: \"kubernetes.io/projected/5a3d9657-8162-4edb-88bc-af303d558b2c-kube-api-access-2gzkk\") pod \"5a3d9657-8162-4edb-88bc-af303d558b2c\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.868458 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ovn-combined-ca-bundle\") pod \"5a3d9657-8162-4edb-88bc-af303d558b2c\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.868508 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-inventory\") pod \"5a3d9657-8162-4edb-88bc-af303d558b2c\" (UID: \"5a3d9657-8162-4edb-88bc-af303d558b2c\") " Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.874550 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a3d9657-8162-4edb-88bc-af303d558b2c-kube-api-access-2gzkk" (OuterVolumeSpecName: "kube-api-access-2gzkk") pod "5a3d9657-8162-4edb-88bc-af303d558b2c" (UID: "5a3d9657-8162-4edb-88bc-af303d558b2c"). InnerVolumeSpecName "kube-api-access-2gzkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.878326 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "5a3d9657-8162-4edb-88bc-af303d558b2c" (UID: "5a3d9657-8162-4edb-88bc-af303d558b2c"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.899784 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-inventory" (OuterVolumeSpecName: "inventory") pod "5a3d9657-8162-4edb-88bc-af303d558b2c" (UID: "5a3d9657-8162-4edb-88bc-af303d558b2c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.909426 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a3d9657-8162-4edb-88bc-af303d558b2c-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "5a3d9657-8162-4edb-88bc-af303d558b2c" (UID: "5a3d9657-8162-4edb-88bc-af303d558b2c"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.913157 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5a3d9657-8162-4edb-88bc-af303d558b2c" (UID: "5a3d9657-8162-4edb-88bc-af303d558b2c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.970646 4747 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5a3d9657-8162-4edb-88bc-af303d558b2c-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.971079 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.971097 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gzkk\" (UniqueName: \"kubernetes.io/projected/5a3d9657-8162-4edb-88bc-af303d558b2c-kube-api-access-2gzkk\") on node \"crc\" DevicePath \"\"" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.972193 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:19:31 crc kubenswrapper[4747]: I1202 17:19:31.972220 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a3d9657-8162-4edb-88bc-af303d558b2c-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.283952 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" event={"ID":"5a3d9657-8162-4edb-88bc-af303d558b2c","Type":"ContainerDied","Data":"733f0cc6f606f7ea594ef54879ce06571f3aa33dc2a342e14a80a114aadca482"} Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.284507 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="733f0cc6f606f7ea594ef54879ce06571f3aa33dc2a342e14a80a114aadca482" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.283973 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rml7b" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.286653 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="ee94c253b80a66198a4fcfc6f17914c7c8d36b3792d805d2a1e16be971d427a2" exitCode=0 Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.286682 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"ee94c253b80a66198a4fcfc6f17914c7c8d36b3792d805d2a1e16be971d427a2"} Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.286701 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d"} Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.286720 4747 scope.go:117] "RemoveContainer" containerID="cfd9377d7f29669cc03efa20a0cfc1b16ca801b6725d849c8208efa6d8051705" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.432732 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc"] Dec 02 17:19:32 crc kubenswrapper[4747]: E1202 17:19:32.433144 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a3d9657-8162-4edb-88bc-af303d558b2c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.433161 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a3d9657-8162-4edb-88bc-af303d558b2c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.433381 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a3d9657-8162-4edb-88bc-af303d558b2c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.433970 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.436366 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.436383 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.436430 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.436482 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.436735 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.438218 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.449372 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc"] Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.629090 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.629494 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.629584 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nqsk\" (UniqueName: \"kubernetes.io/projected/61785f6a-0ba1-41a6-bff7-2558d21779af-kube-api-access-7nqsk\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.629717 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.629796 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.629888 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.732025 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.732430 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.732530 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.732557 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.732577 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nqsk\" (UniqueName: \"kubernetes.io/projected/61785f6a-0ba1-41a6-bff7-2558d21779af-kube-api-access-7nqsk\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.732660 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 
17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.738879 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.739347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.739881 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.740703 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.747584 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.751535 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nqsk\" (UniqueName: \"kubernetes.io/projected/61785f6a-0ba1-41a6-bff7-2558d21779af-kube-api-access-7nqsk\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:32 crc kubenswrapper[4747]: I1202 17:19:32.796503 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:19:33 crc kubenswrapper[4747]: W1202 17:19:33.395078 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61785f6a_0ba1_41a6_bff7_2558d21779af.slice/crio-366a3db890142ee55771d7ce81ab6305a05b451ba3bc0e44e20cbd66d9d17fef WatchSource:0}: Error finding container 366a3db890142ee55771d7ce81ab6305a05b451ba3bc0e44e20cbd66d9d17fef: Status 404 returned error can't find the container with id 366a3db890142ee55771d7ce81ab6305a05b451ba3bc0e44e20cbd66d9d17fef Dec 02 17:19:33 crc kubenswrapper[4747]: I1202 17:19:33.395711 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc"] Dec 02 17:19:34 crc kubenswrapper[4747]: I1202 17:19:34.316411 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" event={"ID":"61785f6a-0ba1-41a6-bff7-2558d21779af","Type":"ContainerStarted","Data":"e4ab8722064a2559b9a95a2aa69aaa9e869ec29a8adbac8fa16acdc9754aaa11"} Dec 02 17:19:34 crc kubenswrapper[4747]: I1202 17:19:34.316754 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" event={"ID":"61785f6a-0ba1-41a6-bff7-2558d21779af","Type":"ContainerStarted","Data":"366a3db890142ee55771d7ce81ab6305a05b451ba3bc0e44e20cbd66d9d17fef"} Dec 02 17:19:34 crc kubenswrapper[4747]: I1202 17:19:34.345898 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" podStartSLOduration=2.161989799 podStartE2EDuration="2.345879119s" podCreationTimestamp="2025-12-02 17:19:32 +0000 UTC" firstStartedPulling="2025-12-02 17:19:33.575785874 +0000 UTC m=+2204.102674623" lastFinishedPulling="2025-12-02 17:19:33.759675174 +0000 UTC m=+2204.286563943" observedRunningTime="2025-12-02 17:19:34.340259108 +0000 UTC m=+2204.867147857" watchObservedRunningTime="2025-12-02 17:19:34.345879119 +0000 UTC m=+2204.872767878" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.026829 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t9gmc"] Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.032996 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.050853 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t9gmc"] Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.133715 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8skc\" (UniqueName: \"kubernetes.io/projected/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-kube-api-access-d8skc\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.133784 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-utilities\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.133811 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-catalog-content\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.235541 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8skc\" (UniqueName: \"kubernetes.io/projected/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-kube-api-access-d8skc\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.235626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-utilities\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.235664 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-catalog-content\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.236309 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-catalog-content\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.236427 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-utilities\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.256265 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d8skc\" (UniqueName: \"kubernetes.io/projected/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-kube-api-access-d8skc\") pod \"certified-operators-t9gmc\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.414071 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:07 crc kubenswrapper[4747]: I1202 17:20:07.925984 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t9gmc"] Dec 02 17:20:08 crc kubenswrapper[4747]: I1202 17:20:08.883568 4747 generic.go:334] "Generic (PLEG): container finished" podID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerID="8409d86685f567ad44a0001a1faa62db441b8990add8cfb92db31c2fac299a65" exitCode=0 Dec 02 17:20:08 crc kubenswrapper[4747]: I1202 17:20:08.883851 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t9gmc" event={"ID":"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107","Type":"ContainerDied","Data":"8409d86685f567ad44a0001a1faa62db441b8990add8cfb92db31c2fac299a65"} Dec 02 17:20:08 crc kubenswrapper[4747]: I1202 17:20:08.883876 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t9gmc" event={"ID":"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107","Type":"ContainerStarted","Data":"3f7b85abe2c1816f37b841b423a7521dc80c46e79740ac05aeb57b8272e823fe"} Dec 02 17:20:09 crc kubenswrapper[4747]: I1202 17:20:09.903442 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t9gmc" event={"ID":"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107","Type":"ContainerStarted","Data":"6868d2987a8e8014540bd6e3e352397e30e3dd8f54f4ae3c556a7b261df4e92e"} Dec 02 17:20:10 crc kubenswrapper[4747]: I1202 17:20:10.913522 4747 generic.go:334] "Generic (PLEG): container finished" podID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerID="6868d2987a8e8014540bd6e3e352397e30e3dd8f54f4ae3c556a7b261df4e92e" exitCode=0 Dec 02 17:20:10 crc kubenswrapper[4747]: I1202 17:20:10.913631 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t9gmc" event={"ID":"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107","Type":"ContainerDied","Data":"6868d2987a8e8014540bd6e3e352397e30e3dd8f54f4ae3c556a7b261df4e92e"} Dec 02 17:20:11 crc kubenswrapper[4747]: I1202 17:20:11.924027 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t9gmc" event={"ID":"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107","Type":"ContainerStarted","Data":"a950405c9ee26bcc4d5e66261ac5a485cc3fd33a4cbc6c7a824cb9ba697b59e7"} Dec 02 17:20:11 crc kubenswrapper[4747]: I1202 17:20:11.953550 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t9gmc" podStartSLOduration=2.400557574 podStartE2EDuration="4.953534666s" podCreationTimestamp="2025-12-02 17:20:07 +0000 UTC" firstStartedPulling="2025-12-02 17:20:08.885552568 +0000 UTC m=+2239.412441317" lastFinishedPulling="2025-12-02 17:20:11.43852962 +0000 UTC m=+2241.965418409" observedRunningTime="2025-12-02 17:20:11.950087808 +0000 UTC m=+2242.476976557" watchObservedRunningTime="2025-12-02 17:20:11.953534666 +0000 UTC m=+2242.480423415" Dec 02 17:20:17 crc kubenswrapper[4747]: I1202 17:20:17.415133 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:17 crc kubenswrapper[4747]: I1202 17:20:17.415680 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:17 crc kubenswrapper[4747]: I1202 17:20:17.498249 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:18 crc kubenswrapper[4747]: I1202 17:20:18.032386 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:18 crc kubenswrapper[4747]: I1202 17:20:18.083344 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t9gmc"] Dec 02 17:20:19 crc kubenswrapper[4747]: I1202 17:20:19.997799 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t9gmc" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="registry-server" containerID="cri-o://a950405c9ee26bcc4d5e66261ac5a485cc3fd33a4cbc6c7a824cb9ba697b59e7" gracePeriod=2 Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.009575 4747 generic.go:334] "Generic (PLEG): container finished" podID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerID="a950405c9ee26bcc4d5e66261ac5a485cc3fd33a4cbc6c7a824cb9ba697b59e7" exitCode=0 Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.009839 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t9gmc" event={"ID":"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107","Type":"ContainerDied","Data":"a950405c9ee26bcc4d5e66261ac5a485cc3fd33a4cbc6c7a824cb9ba697b59e7"} Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.555630 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.616393 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8skc\" (UniqueName: \"kubernetes.io/projected/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-kube-api-access-d8skc\") pod \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.616487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-catalog-content\") pod \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.616814 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-utilities\") pod \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\" (UID: \"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107\") " Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.617738 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-utilities" (OuterVolumeSpecName: "utilities") pod "2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" (UID: "2d851cdd-6ab3-4ecf-9a46-3bd24adb5107"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.623390 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-kube-api-access-d8skc" (OuterVolumeSpecName: "kube-api-access-d8skc") pod "2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" (UID: "2d851cdd-6ab3-4ecf-9a46-3bd24adb5107"). InnerVolumeSpecName "kube-api-access-d8skc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.671538 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" (UID: "2d851cdd-6ab3-4ecf-9a46-3bd24adb5107"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.719206 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.719237 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8skc\" (UniqueName: \"kubernetes.io/projected/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-kube-api-access-d8skc\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:21 crc kubenswrapper[4747]: I1202 17:20:21.719247 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:22 crc kubenswrapper[4747]: I1202 17:20:22.023076 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t9gmc" event={"ID":"2d851cdd-6ab3-4ecf-9a46-3bd24adb5107","Type":"ContainerDied","Data":"3f7b85abe2c1816f37b841b423a7521dc80c46e79740ac05aeb57b8272e823fe"} Dec 02 17:20:22 crc kubenswrapper[4747]: I1202 17:20:22.023144 4747 scope.go:117] "RemoveContainer" containerID="a950405c9ee26bcc4d5e66261ac5a485cc3fd33a4cbc6c7a824cb9ba697b59e7" Dec 02 17:20:22 crc kubenswrapper[4747]: I1202 17:20:22.023172 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t9gmc" Dec 02 17:20:22 crc kubenswrapper[4747]: I1202 17:20:22.047739 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t9gmc"] Dec 02 17:20:22 crc kubenswrapper[4747]: I1202 17:20:22.056551 4747 scope.go:117] "RemoveContainer" containerID="6868d2987a8e8014540bd6e3e352397e30e3dd8f54f4ae3c556a7b261df4e92e" Dec 02 17:20:22 crc kubenswrapper[4747]: I1202 17:20:22.058409 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t9gmc"] Dec 02 17:20:22 crc kubenswrapper[4747]: I1202 17:20:22.103454 4747 scope.go:117] "RemoveContainer" containerID="8409d86685f567ad44a0001a1faa62db441b8990add8cfb92db31c2fac299a65" Dec 02 17:20:23 crc kubenswrapper[4747]: I1202 17:20:23.033983 4747 generic.go:334] "Generic (PLEG): container finished" podID="61785f6a-0ba1-41a6-bff7-2558d21779af" containerID="e4ab8722064a2559b9a95a2aa69aaa9e869ec29a8adbac8fa16acdc9754aaa11" exitCode=0 Dec 02 17:20:23 crc kubenswrapper[4747]: I1202 17:20:23.034083 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" event={"ID":"61785f6a-0ba1-41a6-bff7-2558d21779af","Type":"ContainerDied","Data":"e4ab8722064a2559b9a95a2aa69aaa9e869ec29a8adbac8fa16acdc9754aaa11"} Dec 02 17:20:23 crc kubenswrapper[4747]: I1202 17:20:23.778829 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" path="/var/lib/kubelet/pods/2d851cdd-6ab3-4ecf-9a46-3bd24adb5107/volumes" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.498002 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.678769 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-metadata-combined-ca-bundle\") pod \"61785f6a-0ba1-41a6-bff7-2558d21779af\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.680565 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nqsk\" (UniqueName: \"kubernetes.io/projected/61785f6a-0ba1-41a6-bff7-2558d21779af-kube-api-access-7nqsk\") pod \"61785f6a-0ba1-41a6-bff7-2558d21779af\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.680887 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-ssh-key\") pod \"61785f6a-0ba1-41a6-bff7-2558d21779af\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.681135 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-inventory\") pod \"61785f6a-0ba1-41a6-bff7-2558d21779af\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.681174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-ovn-metadata-agent-neutron-config-0\") pod \"61785f6a-0ba1-41a6-bff7-2558d21779af\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.681213 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-nova-metadata-neutron-config-0\") pod \"61785f6a-0ba1-41a6-bff7-2558d21779af\" (UID: \"61785f6a-0ba1-41a6-bff7-2558d21779af\") " Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.684760 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "61785f6a-0ba1-41a6-bff7-2558d21779af" (UID: "61785f6a-0ba1-41a6-bff7-2558d21779af"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.685184 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61785f6a-0ba1-41a6-bff7-2558d21779af-kube-api-access-7nqsk" (OuterVolumeSpecName: "kube-api-access-7nqsk") pod "61785f6a-0ba1-41a6-bff7-2558d21779af" (UID: "61785f6a-0ba1-41a6-bff7-2558d21779af"). InnerVolumeSpecName "kube-api-access-7nqsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.706610 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-inventory" (OuterVolumeSpecName: "inventory") pod "61785f6a-0ba1-41a6-bff7-2558d21779af" (UID: "61785f6a-0ba1-41a6-bff7-2558d21779af"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.712359 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "61785f6a-0ba1-41a6-bff7-2558d21779af" (UID: "61785f6a-0ba1-41a6-bff7-2558d21779af"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.718803 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "61785f6a-0ba1-41a6-bff7-2558d21779af" (UID: "61785f6a-0ba1-41a6-bff7-2558d21779af"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.725833 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61785f6a-0ba1-41a6-bff7-2558d21779af" (UID: "61785f6a-0ba1-41a6-bff7-2558d21779af"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.783475 4747 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.783512 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nqsk\" (UniqueName: \"kubernetes.io/projected/61785f6a-0ba1-41a6-bff7-2558d21779af-kube-api-access-7nqsk\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.783524 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.783534 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.783546 4747 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:24 crc kubenswrapper[4747]: I1202 17:20:24.783558 4747 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/61785f6a-0ba1-41a6-bff7-2558d21779af-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.057677 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" event={"ID":"61785f6a-0ba1-41a6-bff7-2558d21779af","Type":"ContainerDied","Data":"366a3db890142ee55771d7ce81ab6305a05b451ba3bc0e44e20cbd66d9d17fef"} Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.057723 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="366a3db890142ee55771d7ce81ab6305a05b451ba3bc0e44e20cbd66d9d17fef" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.057771 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.264186 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk"] Dec 02 17:20:25 crc kubenswrapper[4747]: E1202 17:20:25.264668 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="extract-utilities" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.264692 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="extract-utilities" Dec 02 17:20:25 crc kubenswrapper[4747]: E1202 17:20:25.264722 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="registry-server" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.264732 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="registry-server" Dec 02 17:20:25 crc kubenswrapper[4747]: E1202 17:20:25.264743 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="extract-content" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.264752 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="extract-content" Dec 02 17:20:25 crc kubenswrapper[4747]: E1202 17:20:25.264785 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61785f6a-0ba1-41a6-bff7-2558d21779af" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.264796 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="61785f6a-0ba1-41a6-bff7-2558d21779af" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.265037 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d851cdd-6ab3-4ecf-9a46-3bd24adb5107" containerName="registry-server" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.265058 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="61785f6a-0ba1-41a6-bff7-2558d21779af" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.266828 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.272216 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.272410 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.272449 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.272702 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.272710 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.274797 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk"] Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.296508 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46r77\" (UniqueName: \"kubernetes.io/projected/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-kube-api-access-46r77\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.296576 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.296639 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.296660 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.296686 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.399626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-46r77\" (UniqueName: \"kubernetes.io/projected/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-kube-api-access-46r77\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.400053 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.400120 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.400151 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.400182 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.404360 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.404447 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.405243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.410102 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.419737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46r77\" (UniqueName: \"kubernetes.io/projected/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-kube-api-access-46r77\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:25 crc kubenswrapper[4747]: I1202 17:20:25.600327 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:20:26 crc kubenswrapper[4747]: I1202 17:20:26.137045 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk"] Dec 02 17:20:27 crc kubenswrapper[4747]: I1202 17:20:27.079088 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" event={"ID":"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8","Type":"ContainerStarted","Data":"8783296e556ce6a49b0add732c39b554da384921a5cc5a288a46b674880739f2"} Dec 02 17:20:27 crc kubenswrapper[4747]: I1202 17:20:27.079430 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" event={"ID":"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8","Type":"ContainerStarted","Data":"d8051e125be23b1a5ae20a874c0661f9bc58ce5b0dd1f9c574cd16bff9323812"} Dec 02 17:20:27 crc kubenswrapper[4747]: I1202 17:20:27.099247 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" podStartSLOduration=1.772906855 podStartE2EDuration="2.099227934s" podCreationTimestamp="2025-12-02 17:20:25 +0000 UTC" firstStartedPulling="2025-12-02 17:20:26.135632304 +0000 UTC m=+2256.662521053" lastFinishedPulling="2025-12-02 17:20:26.461953383 +0000 UTC m=+2256.988842132" observedRunningTime="2025-12-02 17:20:27.098963696 +0000 UTC m=+2257.625852445" watchObservedRunningTime="2025-12-02 17:20:27.099227934 +0000 UTC m=+2257.626116693" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.307505 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t7m9k"] Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.311493 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.322317 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t7m9k"] Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.416833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-utilities\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.416926 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-catalog-content\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.417383 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6vvl\" (UniqueName: \"kubernetes.io/projected/72919804-668b-4ad9-a957-8e0070622cfa-kube-api-access-l6vvl\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.518667 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-utilities\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.518730 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-catalog-content\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.518768 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6vvl\" (UniqueName: \"kubernetes.io/projected/72919804-668b-4ad9-a957-8e0070622cfa-kube-api-access-l6vvl\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.519345 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-utilities\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.519375 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-catalog-content\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.543521 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l6vvl\" (UniqueName: \"kubernetes.io/projected/72919804-668b-4ad9-a957-8e0070622cfa-kube-api-access-l6vvl\") pod \"community-operators-t7m9k\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:01 crc kubenswrapper[4747]: I1202 17:21:01.639349 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:02 crc kubenswrapper[4747]: I1202 17:21:02.142212 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t7m9k"] Dec 02 17:21:02 crc kubenswrapper[4747]: W1202 17:21:02.147685 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72919804_668b_4ad9_a957_8e0070622cfa.slice/crio-20a6d15fd4897a295873ecfeaf69101bbd9690ed36f4838c238cbea4aa1bead6 WatchSource:0}: Error finding container 20a6d15fd4897a295873ecfeaf69101bbd9690ed36f4838c238cbea4aa1bead6: Status 404 returned error can't find the container with id 20a6d15fd4897a295873ecfeaf69101bbd9690ed36f4838c238cbea4aa1bead6 Dec 02 17:21:02 crc kubenswrapper[4747]: I1202 17:21:02.464385 4747 generic.go:334] "Generic (PLEG): container finished" podID="72919804-668b-4ad9-a957-8e0070622cfa" containerID="27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0" exitCode=0 Dec 02 17:21:02 crc kubenswrapper[4747]: I1202 17:21:02.464436 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7m9k" event={"ID":"72919804-668b-4ad9-a957-8e0070622cfa","Type":"ContainerDied","Data":"27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0"} Dec 02 17:21:02 crc kubenswrapper[4747]: I1202 17:21:02.464653 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7m9k" event={"ID":"72919804-668b-4ad9-a957-8e0070622cfa","Type":"ContainerStarted","Data":"20a6d15fd4897a295873ecfeaf69101bbd9690ed36f4838c238cbea4aa1bead6"} Dec 02 17:21:04 crc kubenswrapper[4747]: I1202 17:21:04.487382 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7m9k" event={"ID":"72919804-668b-4ad9-a957-8e0070622cfa","Type":"ContainerStarted","Data":"dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6"} Dec 02 17:21:05 crc kubenswrapper[4747]: I1202 17:21:05.501298 4747 generic.go:334] "Generic (PLEG): container finished" podID="72919804-668b-4ad9-a957-8e0070622cfa" containerID="dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6" exitCode=0 Dec 02 17:21:05 crc kubenswrapper[4747]: I1202 17:21:05.501339 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7m9k" event={"ID":"72919804-668b-4ad9-a957-8e0070622cfa","Type":"ContainerDied","Data":"dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6"} Dec 02 17:21:06 crc kubenswrapper[4747]: I1202 17:21:06.514613 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7m9k" event={"ID":"72919804-668b-4ad9-a957-8e0070622cfa","Type":"ContainerStarted","Data":"fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f"} Dec 02 17:21:06 crc kubenswrapper[4747]: I1202 17:21:06.536033 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t7m9k" 
podStartSLOduration=2.037703234 podStartE2EDuration="5.536013355s" podCreationTimestamp="2025-12-02 17:21:01 +0000 UTC" firstStartedPulling="2025-12-02 17:21:02.465901189 +0000 UTC m=+2292.992789938" lastFinishedPulling="2025-12-02 17:21:05.96421132 +0000 UTC m=+2296.491100059" observedRunningTime="2025-12-02 17:21:06.534599905 +0000 UTC m=+2297.061488654" watchObservedRunningTime="2025-12-02 17:21:06.536013355 +0000 UTC m=+2297.062902104" Dec 02 17:21:11 crc kubenswrapper[4747]: I1202 17:21:11.640204 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:11 crc kubenswrapper[4747]: I1202 17:21:11.642300 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:11 crc kubenswrapper[4747]: I1202 17:21:11.715054 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:12 crc kubenswrapper[4747]: I1202 17:21:12.628949 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:16 crc kubenswrapper[4747]: I1202 17:21:16.681838 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t7m9k"] Dec 02 17:21:16 crc kubenswrapper[4747]: I1202 17:21:16.682577 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t7m9k" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="registry-server" containerID="cri-o://fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f" gracePeriod=2 Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.270843 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.451975 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-utilities\") pod \"72919804-668b-4ad9-a957-8e0070622cfa\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.452395 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6vvl\" (UniqueName: \"kubernetes.io/projected/72919804-668b-4ad9-a957-8e0070622cfa-kube-api-access-l6vvl\") pod \"72919804-668b-4ad9-a957-8e0070622cfa\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.452507 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-catalog-content\") pod \"72919804-668b-4ad9-a957-8e0070622cfa\" (UID: \"72919804-668b-4ad9-a957-8e0070622cfa\") " Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.453771 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-utilities" (OuterVolumeSpecName: "utilities") pod "72919804-668b-4ad9-a957-8e0070622cfa" (UID: "72919804-668b-4ad9-a957-8e0070622cfa"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.462125 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72919804-668b-4ad9-a957-8e0070622cfa-kube-api-access-l6vvl" (OuterVolumeSpecName: "kube-api-access-l6vvl") pod "72919804-668b-4ad9-a957-8e0070622cfa" (UID: "72919804-668b-4ad9-a957-8e0070622cfa"). InnerVolumeSpecName "kube-api-access-l6vvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.544191 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "72919804-668b-4ad9-a957-8e0070622cfa" (UID: "72919804-668b-4ad9-a957-8e0070622cfa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.555859 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.555932 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6vvl\" (UniqueName: \"kubernetes.io/projected/72919804-668b-4ad9-a957-8e0070622cfa-kube-api-access-l6vvl\") on node \"crc\" DevicePath \"\"" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.556014 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/72919804-668b-4ad9-a957-8e0070622cfa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.650678 4747 generic.go:334] "Generic (PLEG): container finished" podID="72919804-668b-4ad9-a957-8e0070622cfa" containerID="fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f" exitCode=0 Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.650738 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7m9k" event={"ID":"72919804-668b-4ad9-a957-8e0070622cfa","Type":"ContainerDied","Data":"fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f"} Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.650774 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t7m9k" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.650807 4747 scope.go:117] "RemoveContainer" containerID="fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.650791 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t7m9k" event={"ID":"72919804-668b-4ad9-a957-8e0070622cfa","Type":"ContainerDied","Data":"20a6d15fd4897a295873ecfeaf69101bbd9690ed36f4838c238cbea4aa1bead6"} Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.673251 4747 scope.go:117] "RemoveContainer" containerID="dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.707399 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t7m9k"] Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.775414 4747 scope.go:117] "RemoveContainer" containerID="27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.793031 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t7m9k"] Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.797620 4747 scope.go:117] "RemoveContainer" containerID="fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f" Dec 02 17:21:17 crc kubenswrapper[4747]: E1202 17:21:17.798241 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f\": container with ID starting with fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f not found: ID does not exist" containerID="fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.798291 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f"} err="failed to get container status \"fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f\": rpc error: code = NotFound desc = could not find container \"fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f\": container with ID starting with fd83db387dd2766c5f2f5469625bf071110c04373532f42409d3374e5c4fbb9f not found: ID does not exist" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.798321 4747 scope.go:117] "RemoveContainer" containerID="dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6" Dec 02 17:21:17 crc kubenswrapper[4747]: E1202 17:21:17.798847 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6\": container with ID starting with dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6 not found: ID does not exist" containerID="dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.798902 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6"} err="failed to get container status \"dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6\": rpc error: code = NotFound desc = could not find 
container \"dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6\": container with ID starting with dc9cafff1ce82eeb399d7e93639e38e13c94d278031e4a34ffa7e73accc2e1b6 not found: ID does not exist" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.798967 4747 scope.go:117] "RemoveContainer" containerID="27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0" Dec 02 17:21:17 crc kubenswrapper[4747]: E1202 17:21:17.799365 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0\": container with ID starting with 27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0 not found: ID does not exist" containerID="27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0" Dec 02 17:21:17 crc kubenswrapper[4747]: I1202 17:21:17.799411 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0"} err="failed to get container status \"27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0\": rpc error: code = NotFound desc = could not find container \"27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0\": container with ID starting with 27fa683b2f9f3128d4941562c9286b7c481fc6d531e09bf683694a6cc6a158f0 not found: ID does not exist" Dec 02 17:21:19 crc kubenswrapper[4747]: I1202 17:21:19.772328 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72919804-668b-4ad9-a957-8e0070622cfa" path="/var/lib/kubelet/pods/72919804-668b-4ad9-a957-8e0070622cfa/volumes" Dec 02 17:22:01 crc kubenswrapper[4747]: I1202 17:22:01.795099 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:22:01 crc kubenswrapper[4747]: I1202 17:22:01.795781 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:22:31 crc kubenswrapper[4747]: I1202 17:22:31.795881 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:22:31 crc kubenswrapper[4747]: I1202 17:22:31.796627 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:23:01 crc kubenswrapper[4747]: I1202 17:23:01.794780 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 
17:23:01 crc kubenswrapper[4747]: I1202 17:23:01.795498 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:23:01 crc kubenswrapper[4747]: I1202 17:23:01.795560 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:23:01 crc kubenswrapper[4747]: I1202 17:23:01.796779 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:23:01 crc kubenswrapper[4747]: I1202 17:23:01.796887 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" gracePeriod=600 Dec 02 17:23:01 crc kubenswrapper[4747]: E1202 17:23:01.921206 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:23:02 crc kubenswrapper[4747]: I1202 17:23:02.129784 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" exitCode=0 Dec 02 17:23:02 crc kubenswrapper[4747]: I1202 17:23:02.129854 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d"} Dec 02 17:23:02 crc kubenswrapper[4747]: I1202 17:23:02.129910 4747 scope.go:117] "RemoveContainer" containerID="ee94c253b80a66198a4fcfc6f17914c7c8d36b3792d805d2a1e16be971d427a2" Dec 02 17:23:02 crc kubenswrapper[4747]: I1202 17:23:02.130961 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:23:02 crc kubenswrapper[4747]: E1202 17:23:02.131451 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:23:16 crc kubenswrapper[4747]: I1202 17:23:16.760796 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:23:16 crc 
kubenswrapper[4747]: E1202 17:23:16.762206 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:23:29 crc kubenswrapper[4747]: I1202 17:23:29.829213 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:23:29 crc kubenswrapper[4747]: E1202 17:23:29.829891 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:23:43 crc kubenswrapper[4747]: I1202 17:23:43.760288 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:23:43 crc kubenswrapper[4747]: E1202 17:23:43.761045 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:23:58 crc kubenswrapper[4747]: I1202 17:23:58.762025 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:23:58 crc kubenswrapper[4747]: E1202 17:23:58.763422 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:24:13 crc kubenswrapper[4747]: I1202 17:24:13.761190 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:24:13 crc kubenswrapper[4747]: E1202 17:24:13.762692 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:24:26 crc kubenswrapper[4747]: I1202 17:24:26.761348 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:24:26 crc kubenswrapper[4747]: E1202 17:24:26.762419 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:24:31 crc kubenswrapper[4747]: I1202 17:24:31.207888 4747 generic.go:334] "Generic (PLEG): container finished" podID="577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" containerID="8783296e556ce6a49b0add732c39b554da384921a5cc5a288a46b674880739f2" exitCode=0 Dec 02 17:24:31 crc kubenswrapper[4747]: I1202 17:24:31.207960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" event={"ID":"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8","Type":"ContainerDied","Data":"8783296e556ce6a49b0add732c39b554da384921a5cc5a288a46b674880739f2"} Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.734597 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.858141 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46r77\" (UniqueName: \"kubernetes.io/projected/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-kube-api-access-46r77\") pod \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.858219 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-inventory\") pod \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.858431 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-secret-0\") pod \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.858493 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-ssh-key\") pod \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.858547 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-combined-ca-bundle\") pod \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\" (UID: \"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8\") " Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.868074 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-kube-api-access-46r77" (OuterVolumeSpecName: "kube-api-access-46r77") pod "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" (UID: "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8"). InnerVolumeSpecName "kube-api-access-46r77". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.869980 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" (UID: "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.909227 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" (UID: "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.911259 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-inventory" (OuterVolumeSpecName: "inventory") pod "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" (UID: "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.914632 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" (UID: "577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.961370 4747 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.961404 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.961416 4747 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.961431 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46r77\" (UniqueName: \"kubernetes.io/projected/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-kube-api-access-46r77\") on node \"crc\" DevicePath \"\"" Dec 02 17:24:32 crc kubenswrapper[4747]: I1202 17:24:32.961443 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.230111 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" event={"ID":"577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8","Type":"ContainerDied","Data":"d8051e125be23b1a5ae20a874c0661f9bc58ce5b0dd1f9c574cd16bff9323812"} Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.230758 4747 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8051e125be23b1a5ae20a874c0661f9bc58ce5b0dd1f9c574cd16bff9323812" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.230184 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.390938 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6"] Dec 02 17:24:33 crc kubenswrapper[4747]: E1202 17:24:33.391885 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="extract-utilities" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.391926 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="extract-utilities" Dec 02 17:24:33 crc kubenswrapper[4747]: E1202 17:24:33.391947 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.391958 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 17:24:33 crc kubenswrapper[4747]: E1202 17:24:33.391979 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="extract-content" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.391987 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="extract-content" Dec 02 17:24:33 crc kubenswrapper[4747]: E1202 17:24:33.392002 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="registry-server" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.392011 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="registry-server" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.392233 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="72919804-668b-4ad9-a957-8e0070622cfa" containerName="registry-server" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.392250 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.393036 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.398239 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.398620 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.398777 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.399091 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.398829 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.398828 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.398828 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.405352 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6"] Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580351 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq8vg\" (UniqueName: \"kubernetes.io/projected/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-kube-api-access-xq8vg\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580468 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580520 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580658 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580712 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580781 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580860 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.580990 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.581054 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.682693 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.683189 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.683417 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.683650 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.684037 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq8vg\" (UniqueName: \"kubernetes.io/projected/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-kube-api-access-xq8vg\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.684550 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.684837 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.685173 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.685519 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.686990 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.689504 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.689646 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.690129 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.690267 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.691207 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.692710 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.695421 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.723557 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq8vg\" (UniqueName: \"kubernetes.io/projected/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-kube-api-access-xq8vg\") pod \"nova-edpm-deployment-openstack-edpm-ipam-dvmc6\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:33 crc kubenswrapper[4747]: I1202 17:24:33.731898 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:24:34 crc kubenswrapper[4747]: I1202 17:24:34.292430 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6"] Dec 02 17:24:34 crc kubenswrapper[4747]: I1202 17:24:34.303562 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:24:35 crc kubenswrapper[4747]: I1202 17:24:35.251803 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" event={"ID":"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a","Type":"ContainerStarted","Data":"12d8d1923f99207bf802cb551bbd11ebbd28b5ecdebbe11bb7c485a39bf45455"} Dec 02 17:24:35 crc kubenswrapper[4747]: I1202 17:24:35.252249 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" event={"ID":"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a","Type":"ContainerStarted","Data":"9c32eec213dd19ef8f1383d28b4ee184ead7f27292b0b60e4bfcf0a34753e194"} Dec 02 17:24:35 crc kubenswrapper[4747]: I1202 17:24:35.276708 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" podStartSLOduration=2.048771141 podStartE2EDuration="2.276691587s" podCreationTimestamp="2025-12-02 17:24:33 +0000 UTC" firstStartedPulling="2025-12-02 17:24:34.303266594 +0000 UTC m=+2504.830155353" lastFinishedPulling="2025-12-02 17:24:34.53118701 +0000 UTC m=+2505.058075799" observedRunningTime="2025-12-02 17:24:35.274470723 +0000 UTC m=+2505.801359502" watchObservedRunningTime="2025-12-02 17:24:35.276691587 +0000 UTC m=+2505.803580336" Dec 02 17:24:40 crc kubenswrapper[4747]: I1202 17:24:40.761120 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:24:40 crc kubenswrapper[4747]: E1202 17:24:40.761830 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:24:54 crc kubenswrapper[4747]: I1202 17:24:54.760511 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:24:54 crc kubenswrapper[4747]: E1202 17:24:54.761551 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:25:07 crc kubenswrapper[4747]: I1202 17:25:07.762077 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:25:07 crc kubenswrapper[4747]: E1202 17:25:07.763038 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:25:20 crc kubenswrapper[4747]: I1202 17:25:20.760746 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:25:20 crc kubenswrapper[4747]: E1202 17:25:20.762017 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:25:34 crc kubenswrapper[4747]: I1202 17:25:34.760664 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:25:34 crc kubenswrapper[4747]: E1202 17:25:34.761801 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:25:48 crc kubenswrapper[4747]: I1202 17:25:48.761528 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:25:48 crc kubenswrapper[4747]: E1202 17:25:48.762711 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:26:03 crc kubenswrapper[4747]: I1202 17:26:03.761391 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:26:03 crc kubenswrapper[4747]: E1202 17:26:03.762665 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:26:18 crc kubenswrapper[4747]: I1202 17:26:18.761583 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:26:18 crc kubenswrapper[4747]: E1202 17:26:18.764345 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" 
podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:26:33 crc kubenswrapper[4747]: I1202 17:26:33.764347 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:26:33 crc kubenswrapper[4747]: E1202 17:26:33.765133 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:26:47 crc kubenswrapper[4747]: I1202 17:26:47.761149 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:26:47 crc kubenswrapper[4747]: E1202 17:26:47.762226 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:26:59 crc kubenswrapper[4747]: I1202 17:26:59.767302 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:26:59 crc kubenswrapper[4747]: E1202 17:26:59.767944 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:27:13 crc kubenswrapper[4747]: I1202 17:27:13.761488 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:27:13 crc kubenswrapper[4747]: E1202 17:27:13.762643 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.520001 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-skf4j"] Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.523696 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.544204 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-skf4j"] Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.634752 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58gt7\" (UniqueName: \"kubernetes.io/projected/da7b7b85-bde8-4171-af42-6fbdd1109ec1-kube-api-access-58gt7\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.634955 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-catalog-content\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.635040 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-utilities\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.737017 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-utilities\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.737099 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58gt7\" (UniqueName: \"kubernetes.io/projected/da7b7b85-bde8-4171-af42-6fbdd1109ec1-kube-api-access-58gt7\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.737190 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-catalog-content\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.737570 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-utilities\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.737591 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-catalog-content\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.761733 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-58gt7\" (UniqueName: \"kubernetes.io/projected/da7b7b85-bde8-4171-af42-6fbdd1109ec1-kube-api-access-58gt7\") pod \"redhat-marketplace-skf4j\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:21 crc kubenswrapper[4747]: I1202 17:27:21.854941 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:22 crc kubenswrapper[4747]: I1202 17:27:22.405830 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-skf4j"] Dec 02 17:27:22 crc kubenswrapper[4747]: W1202 17:27:22.412372 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda7b7b85_bde8_4171_af42_6fbdd1109ec1.slice/crio-76f9169b8d685cbe93323c8ee7ec9317ace34b7214926dfcdfce30bbbcded9a8 WatchSource:0}: Error finding container 76f9169b8d685cbe93323c8ee7ec9317ace34b7214926dfcdfce30bbbcded9a8: Status 404 returned error can't find the container with id 76f9169b8d685cbe93323c8ee7ec9317ace34b7214926dfcdfce30bbbcded9a8 Dec 02 17:27:23 crc kubenswrapper[4747]: I1202 17:27:23.204196 4747 generic.go:334] "Generic (PLEG): container finished" podID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerID="b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2" exitCode=0 Dec 02 17:27:23 crc kubenswrapper[4747]: I1202 17:27:23.206247 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skf4j" event={"ID":"da7b7b85-bde8-4171-af42-6fbdd1109ec1","Type":"ContainerDied","Data":"b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2"} Dec 02 17:27:23 crc kubenswrapper[4747]: I1202 17:27:23.206828 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skf4j" event={"ID":"da7b7b85-bde8-4171-af42-6fbdd1109ec1","Type":"ContainerStarted","Data":"76f9169b8d685cbe93323c8ee7ec9317ace34b7214926dfcdfce30bbbcded9a8"} Dec 02 17:27:24 crc kubenswrapper[4747]: I1202 17:27:24.217250 4747 generic.go:334] "Generic (PLEG): container finished" podID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerID="2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f" exitCode=0 Dec 02 17:27:24 crc kubenswrapper[4747]: I1202 17:27:24.217482 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skf4j" event={"ID":"da7b7b85-bde8-4171-af42-6fbdd1109ec1","Type":"ContainerDied","Data":"2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f"} Dec 02 17:27:24 crc kubenswrapper[4747]: I1202 17:27:24.760619 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:27:24 crc kubenswrapper[4747]: E1202 17:27:24.761339 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:27:25 crc kubenswrapper[4747]: I1202 17:27:25.231324 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skf4j" 
event={"ID":"da7b7b85-bde8-4171-af42-6fbdd1109ec1","Type":"ContainerStarted","Data":"5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380"} Dec 02 17:27:25 crc kubenswrapper[4747]: I1202 17:27:25.260882 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-skf4j" podStartSLOduration=2.672981353 podStartE2EDuration="4.260862604s" podCreationTimestamp="2025-12-02 17:27:21 +0000 UTC" firstStartedPulling="2025-12-02 17:27:23.208077542 +0000 UTC m=+2673.734966301" lastFinishedPulling="2025-12-02 17:27:24.795958803 +0000 UTC m=+2675.322847552" observedRunningTime="2025-12-02 17:27:25.258197728 +0000 UTC m=+2675.785086487" watchObservedRunningTime="2025-12-02 17:27:25.260862604 +0000 UTC m=+2675.787751353" Dec 02 17:27:31 crc kubenswrapper[4747]: I1202 17:27:31.855392 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:31 crc kubenswrapper[4747]: I1202 17:27:31.855963 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:31 crc kubenswrapper[4747]: I1202 17:27:31.907093 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:32 crc kubenswrapper[4747]: I1202 17:27:32.738127 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:32 crc kubenswrapper[4747]: I1202 17:27:32.802597 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-skf4j"] Dec 02 17:27:34 crc kubenswrapper[4747]: I1202 17:27:34.666245 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-skf4j" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="registry-server" containerID="cri-o://5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380" gracePeriod=2 Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.128389 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.261407 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-utilities\") pod \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.261619 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58gt7\" (UniqueName: \"kubernetes.io/projected/da7b7b85-bde8-4171-af42-6fbdd1109ec1-kube-api-access-58gt7\") pod \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.261694 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-catalog-content\") pod \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\" (UID: \"da7b7b85-bde8-4171-af42-6fbdd1109ec1\") " Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.264086 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-utilities" (OuterVolumeSpecName: "utilities") pod "da7b7b85-bde8-4171-af42-6fbdd1109ec1" (UID: "da7b7b85-bde8-4171-af42-6fbdd1109ec1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.267968 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da7b7b85-bde8-4171-af42-6fbdd1109ec1-kube-api-access-58gt7" (OuterVolumeSpecName: "kube-api-access-58gt7") pod "da7b7b85-bde8-4171-af42-6fbdd1109ec1" (UID: "da7b7b85-bde8-4171-af42-6fbdd1109ec1"). InnerVolumeSpecName "kube-api-access-58gt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.281959 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da7b7b85-bde8-4171-af42-6fbdd1109ec1" (UID: "da7b7b85-bde8-4171-af42-6fbdd1109ec1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.365340 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.365416 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58gt7\" (UniqueName: \"kubernetes.io/projected/da7b7b85-bde8-4171-af42-6fbdd1109ec1-kube-api-access-58gt7\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.365446 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da7b7b85-bde8-4171-af42-6fbdd1109ec1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.691110 4747 generic.go:334] "Generic (PLEG): container finished" podID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerID="5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380" exitCode=0 Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.691202 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skf4j" event={"ID":"da7b7b85-bde8-4171-af42-6fbdd1109ec1","Type":"ContainerDied","Data":"5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380"} Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.691263 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-skf4j" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.691609 4747 scope.go:117] "RemoveContainer" containerID="5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.691584 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-skf4j" event={"ID":"da7b7b85-bde8-4171-af42-6fbdd1109ec1","Type":"ContainerDied","Data":"76f9169b8d685cbe93323c8ee7ec9317ace34b7214926dfcdfce30bbbcded9a8"} Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.731274 4747 scope.go:117] "RemoveContainer" containerID="2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.761258 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:27:35 crc kubenswrapper[4747]: E1202 17:27:35.761524 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.769463 4747 scope.go:117] "RemoveContainer" containerID="b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.776488 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-skf4j"] Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.777681 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-skf4j"] Dec 02 17:27:35 crc kubenswrapper[4747]: 
I1202 17:27:35.805986 4747 scope.go:117] "RemoveContainer" containerID="5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380" Dec 02 17:27:35 crc kubenswrapper[4747]: E1202 17:27:35.806487 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380\": container with ID starting with 5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380 not found: ID does not exist" containerID="5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.806529 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380"} err="failed to get container status \"5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380\": rpc error: code = NotFound desc = could not find container \"5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380\": container with ID starting with 5a148189e9b03db0633f4f741118dab7f698e2f62143585a037e8ed60d61c380 not found: ID does not exist" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.806555 4747 scope.go:117] "RemoveContainer" containerID="2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f" Dec 02 17:27:35 crc kubenswrapper[4747]: E1202 17:27:35.806772 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f\": container with ID starting with 2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f not found: ID does not exist" containerID="2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.806798 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f"} err="failed to get container status \"2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f\": rpc error: code = NotFound desc = could not find container \"2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f\": container with ID starting with 2420bf8891d7468c0e56cf4087b799946fabc4fbf62aaae828be9733ec3ef47f not found: ID does not exist" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.806812 4747 scope.go:117] "RemoveContainer" containerID="b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2" Dec 02 17:27:35 crc kubenswrapper[4747]: E1202 17:27:35.807644 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2\": container with ID starting with b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2 not found: ID does not exist" containerID="b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2" Dec 02 17:27:35 crc kubenswrapper[4747]: I1202 17:27:35.807672 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2"} err="failed to get container status \"b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2\": rpc error: code = NotFound desc = could not find container \"b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2\": container 
with ID starting with b405ee39babccc0c2a22fb1187adade2414d7bfd00f693007a21865223867ab2 not found: ID does not exist" Dec 02 17:27:37 crc kubenswrapper[4747]: I1202 17:27:37.780299 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" path="/var/lib/kubelet/pods/da7b7b85-bde8-4171-af42-6fbdd1109ec1/volumes" Dec 02 17:27:47 crc kubenswrapper[4747]: I1202 17:27:47.761117 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:27:47 crc kubenswrapper[4747]: E1202 17:27:47.762761 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:27:55 crc kubenswrapper[4747]: I1202 17:27:55.903761 4747 generic.go:334] "Generic (PLEG): container finished" podID="6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" containerID="12d8d1923f99207bf802cb551bbd11ebbd28b5ecdebbe11bb7c485a39bf45455" exitCode=0 Dec 02 17:27:55 crc kubenswrapper[4747]: I1202 17:27:55.903886 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" event={"ID":"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a","Type":"ContainerDied","Data":"12d8d1923f99207bf802cb551bbd11ebbd28b5ecdebbe11bb7c485a39bf45455"} Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.388794 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.571372 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-1\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.571758 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-extra-config-0\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.571780 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-1\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.571831 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-inventory\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.571860 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-ssh-key\") pod 
\"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.572179 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-combined-ca-bundle\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.572249 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq8vg\" (UniqueName: \"kubernetes.io/projected/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-kube-api-access-xq8vg\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.572297 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-0\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.572371 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-0\") pod \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\" (UID: \"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a\") " Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.578789 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.581010 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-kube-api-access-xq8vg" (OuterVolumeSpecName: "kube-api-access-xq8vg") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "kube-api-access-xq8vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.603541 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.620447 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "nova-extra-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.621788 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.621802 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-inventory" (OuterVolumeSpecName: "inventory") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.624850 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.626443 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.626699 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" (UID: "6bf55284-16a0-45c0-8ce9-9e074f7d7e0a"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.674883 4747 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675130 4747 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675171 4747 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675184 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675276 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675291 4747 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675403 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq8vg\" (UniqueName: \"kubernetes.io/projected/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-kube-api-access-xq8vg\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675419 4747 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.675431 4747 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/6bf55284-16a0-45c0-8ce9-9e074f7d7e0a-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.926491 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" event={"ID":"6bf55284-16a0-45c0-8ce9-9e074f7d7e0a","Type":"ContainerDied","Data":"9c32eec213dd19ef8f1383d28b4ee184ead7f27292b0b60e4bfcf0a34753e194"} Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.926553 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c32eec213dd19ef8f1383d28b4ee184ead7f27292b0b60e4bfcf0a34753e194" Dec 02 17:27:57 crc kubenswrapper[4747]: I1202 17:27:57.926595 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-dvmc6" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.098526 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn"] Dec 02 17:27:58 crc kubenswrapper[4747]: E1202 17:27:58.099217 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="extract-content" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.099234 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="extract-content" Dec 02 17:27:58 crc kubenswrapper[4747]: E1202 17:27:58.099252 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.099259 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 17:27:58 crc kubenswrapper[4747]: E1202 17:27:58.099287 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="extract-utilities" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.099296 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="extract-utilities" Dec 02 17:27:58 crc kubenswrapper[4747]: E1202 17:27:58.099303 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="registry-server" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.099309 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="registry-server" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.100053 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf55284-16a0-45c0-8ce9-9e074f7d7e0a" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.100092 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="da7b7b85-bde8-4171-af42-6fbdd1109ec1" containerName="registry-server" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.100752 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.102873 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.103138 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.103293 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mt9sq" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.103485 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.110387 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.118738 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn"] Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.289216 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.289367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.289425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.289455 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8qh6\" (UniqueName: \"kubernetes.io/projected/aaa35a26-230d-4226-a19b-776a48b1bf07-kube-api-access-g8qh6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.289502 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 
17:27:58.289533 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.289580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.391668 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.391747 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8qh6\" (UniqueName: \"kubernetes.io/projected/aaa35a26-230d-4226-a19b-776a48b1bf07-kube-api-access-g8qh6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.391820 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.391860 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.392038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.392193 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: 
\"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.392328 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.398003 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.398136 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.399552 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.399616 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.399805 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.400331 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.422051 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8qh6\" (UniqueName: \"kubernetes.io/projected/aaa35a26-230d-4226-a19b-776a48b1bf07-kube-api-access-g8qh6\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn\" (UID: 
\"aaa35a26-230d-4226-a19b-776a48b1bf07\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:58 crc kubenswrapper[4747]: I1202 17:27:58.720052 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:27:59 crc kubenswrapper[4747]: I1202 17:27:59.319447 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn"] Dec 02 17:27:59 crc kubenswrapper[4747]: I1202 17:27:59.970634 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" event={"ID":"aaa35a26-230d-4226-a19b-776a48b1bf07","Type":"ContainerStarted","Data":"7387e03e585b29a70e4de626c9e824cf987b1a5d90166a04e75f2617da25ecde"} Dec 02 17:27:59 crc kubenswrapper[4747]: I1202 17:27:59.971059 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" event={"ID":"aaa35a26-230d-4226-a19b-776a48b1bf07","Type":"ContainerStarted","Data":"eb2edca1d82e904944a39b130e180c3f51ef1cba225c8aa325c9c3a068892eca"} Dec 02 17:28:00 crc kubenswrapper[4747]: I1202 17:28:00.008106 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" podStartSLOduration=1.782160298 podStartE2EDuration="2.008084388s" podCreationTimestamp="2025-12-02 17:27:58 +0000 UTC" firstStartedPulling="2025-12-02 17:27:59.333865139 +0000 UTC m=+2709.860753898" lastFinishedPulling="2025-12-02 17:27:59.559789239 +0000 UTC m=+2710.086677988" observedRunningTime="2025-12-02 17:27:59.999866904 +0000 UTC m=+2710.526755683" watchObservedRunningTime="2025-12-02 17:28:00.008084388 +0000 UTC m=+2710.534973167" Dec 02 17:28:01 crc kubenswrapper[4747]: I1202 17:28:01.761081 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:28:01 crc kubenswrapper[4747]: E1202 17:28:01.762650 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:28:14 crc kubenswrapper[4747]: I1202 17:28:14.761383 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:28:15 crc kubenswrapper[4747]: I1202 17:28:15.173774 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"301c861d5e42340432be248256a4d3834ff99a0200966e47edb34d095843e970"} Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.196494 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh"] Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.198260 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.200323 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.202058 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.205562 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh"] Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.259514 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e34fb4-35f8-4987-8d7d-6fedb8028691-secret-volume\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.259835 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsv2d\" (UniqueName: \"kubernetes.io/projected/98e34fb4-35f8-4987-8d7d-6fedb8028691-kube-api-access-rsv2d\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.259963 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e34fb4-35f8-4987-8d7d-6fedb8028691-config-volume\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.361292 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e34fb4-35f8-4987-8d7d-6fedb8028691-secret-volume\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.361366 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsv2d\" (UniqueName: \"kubernetes.io/projected/98e34fb4-35f8-4987-8d7d-6fedb8028691-kube-api-access-rsv2d\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.361394 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e34fb4-35f8-4987-8d7d-6fedb8028691-config-volume\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.362451 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e34fb4-35f8-4987-8d7d-6fedb8028691-config-volume\") pod 
\"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.368177 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e34fb4-35f8-4987-8d7d-6fedb8028691-secret-volume\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.390612 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsv2d\" (UniqueName: \"kubernetes.io/projected/98e34fb4-35f8-4987-8d7d-6fedb8028691-kube-api-access-rsv2d\") pod \"collect-profiles-29411610-wr9xh\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.521609 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:00 crc kubenswrapper[4747]: I1202 17:30:00.977242 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh"] Dec 02 17:30:01 crc kubenswrapper[4747]: I1202 17:30:01.412970 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" event={"ID":"98e34fb4-35f8-4987-8d7d-6fedb8028691","Type":"ContainerStarted","Data":"b88378d775c63578fa184e974a8bb53254900b05a1bee14e15d1f69df77ee9f2"} Dec 02 17:30:01 crc kubenswrapper[4747]: I1202 17:30:01.413239 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" event={"ID":"98e34fb4-35f8-4987-8d7d-6fedb8028691","Type":"ContainerStarted","Data":"60ce028cf042e53ea64b1e9f071e97e15320d615b503791612b9f03ae9ef29ea"} Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.425296 4747 generic.go:334] "Generic (PLEG): container finished" podID="98e34fb4-35f8-4987-8d7d-6fedb8028691" containerID="b88378d775c63578fa184e974a8bb53254900b05a1bee14e15d1f69df77ee9f2" exitCode=0 Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.425391 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" event={"ID":"98e34fb4-35f8-4987-8d7d-6fedb8028691","Type":"ContainerDied","Data":"b88378d775c63578fa184e974a8bb53254900b05a1bee14e15d1f69df77ee9f2"} Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.816780 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.906708 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsv2d\" (UniqueName: \"kubernetes.io/projected/98e34fb4-35f8-4987-8d7d-6fedb8028691-kube-api-access-rsv2d\") pod \"98e34fb4-35f8-4987-8d7d-6fedb8028691\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.906946 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e34fb4-35f8-4987-8d7d-6fedb8028691-secret-volume\") pod \"98e34fb4-35f8-4987-8d7d-6fedb8028691\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.907016 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e34fb4-35f8-4987-8d7d-6fedb8028691-config-volume\") pod \"98e34fb4-35f8-4987-8d7d-6fedb8028691\" (UID: \"98e34fb4-35f8-4987-8d7d-6fedb8028691\") " Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.908036 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98e34fb4-35f8-4987-8d7d-6fedb8028691-config-volume" (OuterVolumeSpecName: "config-volume") pod "98e34fb4-35f8-4987-8d7d-6fedb8028691" (UID: "98e34fb4-35f8-4987-8d7d-6fedb8028691"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.911761 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98e34fb4-35f8-4987-8d7d-6fedb8028691-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "98e34fb4-35f8-4987-8d7d-6fedb8028691" (UID: "98e34fb4-35f8-4987-8d7d-6fedb8028691"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:30:02 crc kubenswrapper[4747]: I1202 17:30:02.913506 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98e34fb4-35f8-4987-8d7d-6fedb8028691-kube-api-access-rsv2d" (OuterVolumeSpecName: "kube-api-access-rsv2d") pod "98e34fb4-35f8-4987-8d7d-6fedb8028691" (UID: "98e34fb4-35f8-4987-8d7d-6fedb8028691"). InnerVolumeSpecName "kube-api-access-rsv2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.009276 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/98e34fb4-35f8-4987-8d7d-6fedb8028691-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.009332 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsv2d\" (UniqueName: \"kubernetes.io/projected/98e34fb4-35f8-4987-8d7d-6fedb8028691-kube-api-access-rsv2d\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.009347 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/98e34fb4-35f8-4987-8d7d-6fedb8028691-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.437220 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" event={"ID":"98e34fb4-35f8-4987-8d7d-6fedb8028691","Type":"ContainerDied","Data":"60ce028cf042e53ea64b1e9f071e97e15320d615b503791612b9f03ae9ef29ea"} Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.437283 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60ce028cf042e53ea64b1e9f071e97e15320d615b503791612b9f03ae9ef29ea" Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.437301 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411610-wr9xh" Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.902135 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k"] Dec 02 17:30:03 crc kubenswrapper[4747]: I1202 17:30:03.912113 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411565-x599k"] Dec 02 17:30:05 crc kubenswrapper[4747]: I1202 17:30:05.780609 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2300c9ec-034d-4103-8a50-a9c5c507f1ad" path="/var/lib/kubelet/pods/2300c9ec-034d-4103-8a50-a9c5c507f1ad/volumes" Dec 02 17:30:08 crc kubenswrapper[4747]: I1202 17:30:08.328299 4747 scope.go:117] "RemoveContainer" containerID="4171828032fd92dcc8dc89ed341f4732b882280991babea504307e3880035c73" Dec 02 17:30:22 crc kubenswrapper[4747]: I1202 17:30:22.629738 4747 generic.go:334] "Generic (PLEG): container finished" podID="aaa35a26-230d-4226-a19b-776a48b1bf07" containerID="7387e03e585b29a70e4de626c9e824cf987b1a5d90166a04e75f2617da25ecde" exitCode=0 Dec 02 17:30:22 crc kubenswrapper[4747]: I1202 17:30:22.629806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" event={"ID":"aaa35a26-230d-4226-a19b-776a48b1bf07","Type":"ContainerDied","Data":"7387e03e585b29a70e4de626c9e824cf987b1a5d90166a04e75f2617da25ecde"} Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.100876 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.125153 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-telemetry-combined-ca-bundle\") pod \"aaa35a26-230d-4226-a19b-776a48b1bf07\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.125220 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-0\") pod \"aaa35a26-230d-4226-a19b-776a48b1bf07\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.125244 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-2\") pod \"aaa35a26-230d-4226-a19b-776a48b1bf07\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.125347 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-inventory\") pod \"aaa35a26-230d-4226-a19b-776a48b1bf07\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.125387 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8qh6\" (UniqueName: \"kubernetes.io/projected/aaa35a26-230d-4226-a19b-776a48b1bf07-kube-api-access-g8qh6\") pod \"aaa35a26-230d-4226-a19b-776a48b1bf07\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.125628 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-1\") pod \"aaa35a26-230d-4226-a19b-776a48b1bf07\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.125648 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ssh-key\") pod \"aaa35a26-230d-4226-a19b-776a48b1bf07\" (UID: \"aaa35a26-230d-4226-a19b-776a48b1bf07\") " Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.134950 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "aaa35a26-230d-4226-a19b-776a48b1bf07" (UID: "aaa35a26-230d-4226-a19b-776a48b1bf07"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.136151 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaa35a26-230d-4226-a19b-776a48b1bf07-kube-api-access-g8qh6" (OuterVolumeSpecName: "kube-api-access-g8qh6") pod "aaa35a26-230d-4226-a19b-776a48b1bf07" (UID: "aaa35a26-230d-4226-a19b-776a48b1bf07"). 
InnerVolumeSpecName "kube-api-access-g8qh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.161373 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "aaa35a26-230d-4226-a19b-776a48b1bf07" (UID: "aaa35a26-230d-4226-a19b-776a48b1bf07"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.181393 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "aaa35a26-230d-4226-a19b-776a48b1bf07" (UID: "aaa35a26-230d-4226-a19b-776a48b1bf07"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.182290 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "aaa35a26-230d-4226-a19b-776a48b1bf07" (UID: "aaa35a26-230d-4226-a19b-776a48b1bf07"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.182314 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "aaa35a26-230d-4226-a19b-776a48b1bf07" (UID: "aaa35a26-230d-4226-a19b-776a48b1bf07"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.185068 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-inventory" (OuterVolumeSpecName: "inventory") pod "aaa35a26-230d-4226-a19b-776a48b1bf07" (UID: "aaa35a26-230d-4226-a19b-776a48b1bf07"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.228427 4747 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.228717 4747 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.228731 4747 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.228746 4747 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.228777 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8qh6\" (UniqueName: \"kubernetes.io/projected/aaa35a26-230d-4226-a19b-776a48b1bf07-kube-api-access-g8qh6\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.228791 4747 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.228800 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/aaa35a26-230d-4226-a19b-776a48b1bf07-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.654146 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" event={"ID":"aaa35a26-230d-4226-a19b-776a48b1bf07","Type":"ContainerDied","Data":"eb2edca1d82e904944a39b130e180c3f51ef1cba225c8aa325c9c3a068892eca"} Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.654189 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb2edca1d82e904944a39b130e180c3f51ef1cba225c8aa325c9c3a068892eca" Dec 02 17:30:24 crc kubenswrapper[4747]: I1202 17:30:24.654304 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn" Dec 02 17:30:24 crc kubenswrapper[4747]: E1202 17:30:24.744291 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaaa35a26_230d_4226_a19b_776a48b1bf07.slice/crio-eb2edca1d82e904944a39b130e180c3f51ef1cba225c8aa325c9c3a068892eca\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaaa35a26_230d_4226_a19b_776a48b1bf07.slice\": RecentStats: unable to find data in memory cache]" Dec 02 17:30:29 crc kubenswrapper[4747]: E1202 17:30:29.342029 4747 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.201:36616->38.102.83.201:33503: write tcp 38.102.83.201:36616->38.102.83.201:33503: write: broken pipe Dec 02 17:30:31 crc kubenswrapper[4747]: I1202 17:30:31.795051 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:30:31 crc kubenswrapper[4747]: I1202 17:30:31.795359 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:31:01 crc kubenswrapper[4747]: I1202 17:31:01.794672 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:31:01 crc kubenswrapper[4747]: I1202 17:31:01.795184 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:31:08 crc kubenswrapper[4747]: I1202 17:31:08.956526 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x6chq"] Dec 02 17:31:08 crc kubenswrapper[4747]: E1202 17:31:08.957683 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98e34fb4-35f8-4987-8d7d-6fedb8028691" containerName="collect-profiles" Dec 02 17:31:08 crc kubenswrapper[4747]: I1202 17:31:08.957704 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="98e34fb4-35f8-4987-8d7d-6fedb8028691" containerName="collect-profiles" Dec 02 17:31:08 crc kubenswrapper[4747]: E1202 17:31:08.957728 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaa35a26-230d-4226-a19b-776a48b1bf07" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 17:31:08 crc kubenswrapper[4747]: I1202 17:31:08.957743 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaa35a26-230d-4226-a19b-776a48b1bf07" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 17:31:08 crc kubenswrapper[4747]: I1202 17:31:08.958444 4747 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="aaa35a26-230d-4226-a19b-776a48b1bf07" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 17:31:08 crc kubenswrapper[4747]: I1202 17:31:08.958483 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="98e34fb4-35f8-4987-8d7d-6fedb8028691" containerName="collect-profiles" Dec 02 17:31:08 crc kubenswrapper[4747]: I1202 17:31:08.960928 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:08 crc kubenswrapper[4747]: I1202 17:31:08.978887 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6chq"] Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.112375 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-utilities\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.112576 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-catalog-content\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.112646 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkrtt\" (UniqueName: \"kubernetes.io/projected/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-kube-api-access-xkrtt\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.143229 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q8nnz"] Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.145569 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.165698 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q8nnz"] Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.214423 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-catalog-content\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.214525 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkrtt\" (UniqueName: \"kubernetes.io/projected/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-kube-api-access-xkrtt\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.214613 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-utilities\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.214918 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-catalog-content\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.215175 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-utilities\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.238504 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkrtt\" (UniqueName: \"kubernetes.io/projected/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-kube-api-access-xkrtt\") pod \"certified-operators-x6chq\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.286704 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.316886 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-utilities\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.316965 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-catalog-content\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.317106 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htjk9\" (UniqueName: \"kubernetes.io/projected/01816c39-7390-4439-b3df-59e76ccaebc9-kube-api-access-htjk9\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.418305 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-utilities\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.418615 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-catalog-content\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.418791 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-utilities\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.418802 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htjk9\" (UniqueName: \"kubernetes.io/projected/01816c39-7390-4439-b3df-59e76ccaebc9-kube-api-access-htjk9\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.419003 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-catalog-content\") pod \"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.449325 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htjk9\" (UniqueName: \"kubernetes.io/projected/01816c39-7390-4439-b3df-59e76ccaebc9-kube-api-access-htjk9\") pod 
\"community-operators-q8nnz\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.466711 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:09 crc kubenswrapper[4747]: I1202 17:31:09.921547 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6chq"] Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.071045 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q8nnz"] Dec 02 17:31:10 crc kubenswrapper[4747]: W1202 17:31:10.071733 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01816c39_7390_4439_b3df_59e76ccaebc9.slice/crio-b5a946712d678276c8588299a7be161c60a08df51496fa5d3b7c8bc3cdd0a0b5 WatchSource:0}: Error finding container b5a946712d678276c8588299a7be161c60a08df51496fa5d3b7c8bc3cdd0a0b5: Status 404 returned error can't find the container with id b5a946712d678276c8588299a7be161c60a08df51496fa5d3b7c8bc3cdd0a0b5 Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.352860 4747 generic.go:334] "Generic (PLEG): container finished" podID="01816c39-7390-4439-b3df-59e76ccaebc9" containerID="9bedaf6f6ad67a74bbd93b4d34dbe70b455cff64ca08c6a806832868fbd7172d" exitCode=0 Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.352944 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8nnz" event={"ID":"01816c39-7390-4439-b3df-59e76ccaebc9","Type":"ContainerDied","Data":"9bedaf6f6ad67a74bbd93b4d34dbe70b455cff64ca08c6a806832868fbd7172d"} Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.353295 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8nnz" event={"ID":"01816c39-7390-4439-b3df-59e76ccaebc9","Type":"ContainerStarted","Data":"b5a946712d678276c8588299a7be161c60a08df51496fa5d3b7c8bc3cdd0a0b5"} Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.358803 4747 generic.go:334] "Generic (PLEG): container finished" podID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerID="b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b" exitCode=0 Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.358859 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6chq" event={"ID":"54bf01ed-0add-4d3a-93ad-2dc405e7a79d","Type":"ContainerDied","Data":"b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b"} Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.358897 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6chq" event={"ID":"54bf01ed-0add-4d3a-93ad-2dc405e7a79d","Type":"ContainerStarted","Data":"550e1a07cce4653372546c288cfb20e955cce30a86f6b0dfb1601e05a62b1d14"} Dec 02 17:31:10 crc kubenswrapper[4747]: I1202 17:31:10.360269 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.374999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8nnz" event={"ID":"01816c39-7390-4439-b3df-59e76ccaebc9","Type":"ContainerStarted","Data":"6c207249f26d5fe73f94ef76b5cb59e000195d774cd19cd12bccb51c069ee987"} Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 
17:31:11.544339 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b85g2"] Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.548384 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.558447 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b85g2"] Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.667224 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbqhd\" (UniqueName: \"kubernetes.io/projected/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-kube-api-access-zbqhd\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.667331 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-catalog-content\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.667427 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-utilities\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.768346 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-utilities\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.768437 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbqhd\" (UniqueName: \"kubernetes.io/projected/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-kube-api-access-zbqhd\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.768483 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-catalog-content\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.769003 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-catalog-content\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.769234 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-utilities\") pod \"redhat-operators-b85g2\" (UID: 
\"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.795838 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbqhd\" (UniqueName: \"kubernetes.io/projected/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-kube-api-access-zbqhd\") pod \"redhat-operators-b85g2\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:11 crc kubenswrapper[4747]: I1202 17:31:11.867187 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:12 crc kubenswrapper[4747]: I1202 17:31:12.311103 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b85g2"] Dec 02 17:31:12 crc kubenswrapper[4747]: I1202 17:31:12.396149 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6chq" event={"ID":"54bf01ed-0add-4d3a-93ad-2dc405e7a79d","Type":"ContainerStarted","Data":"3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5"} Dec 02 17:31:12 crc kubenswrapper[4747]: I1202 17:31:12.401623 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b85g2" event={"ID":"ca465c6a-13c8-4a37-b14c-568f4fba4cdb","Type":"ContainerStarted","Data":"02da0337e7447e6f00254e8b2c0af713f916959665b6821a9aa672187220e7a9"} Dec 02 17:31:12 crc kubenswrapper[4747]: I1202 17:31:12.404236 4747 generic.go:334] "Generic (PLEG): container finished" podID="01816c39-7390-4439-b3df-59e76ccaebc9" containerID="6c207249f26d5fe73f94ef76b5cb59e000195d774cd19cd12bccb51c069ee987" exitCode=0 Dec 02 17:31:12 crc kubenswrapper[4747]: I1202 17:31:12.404279 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8nnz" event={"ID":"01816c39-7390-4439-b3df-59e76ccaebc9","Type":"ContainerDied","Data":"6c207249f26d5fe73f94ef76b5cb59e000195d774cd19cd12bccb51c069ee987"} Dec 02 17:31:13 crc kubenswrapper[4747]: I1202 17:31:13.422541 4747 generic.go:334] "Generic (PLEG): container finished" podID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerID="3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5" exitCode=0 Dec 02 17:31:13 crc kubenswrapper[4747]: I1202 17:31:13.423098 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6chq" event={"ID":"54bf01ed-0add-4d3a-93ad-2dc405e7a79d","Type":"ContainerDied","Data":"3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5"} Dec 02 17:31:14 crc kubenswrapper[4747]: I1202 17:31:14.436577 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6chq" event={"ID":"54bf01ed-0add-4d3a-93ad-2dc405e7a79d","Type":"ContainerStarted","Data":"4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe"} Dec 02 17:31:14 crc kubenswrapper[4747]: I1202 17:31:14.439559 4747 generic.go:334] "Generic (PLEG): container finished" podID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerID="24d33bba422ffeba1c710a2f00ca57be9797429f3f50c26d5cb3d252e2468e50" exitCode=0 Dec 02 17:31:14 crc kubenswrapper[4747]: I1202 17:31:14.439614 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b85g2" event={"ID":"ca465c6a-13c8-4a37-b14c-568f4fba4cdb","Type":"ContainerDied","Data":"24d33bba422ffeba1c710a2f00ca57be9797429f3f50c26d5cb3d252e2468e50"} Dec 02 
17:31:14 crc kubenswrapper[4747]: I1202 17:31:14.442972 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8nnz" event={"ID":"01816c39-7390-4439-b3df-59e76ccaebc9","Type":"ContainerStarted","Data":"70cc5f9d736007ff06336fde577b70bd2617508688b9949d3f1ae232f3408e5c"} Dec 02 17:31:14 crc kubenswrapper[4747]: I1202 17:31:14.475005 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x6chq" podStartSLOduration=3.03100187 podStartE2EDuration="6.47497942s" podCreationTimestamp="2025-12-02 17:31:08 +0000 UTC" firstStartedPulling="2025-12-02 17:31:10.36264315 +0000 UTC m=+2900.889531899" lastFinishedPulling="2025-12-02 17:31:13.80662069 +0000 UTC m=+2904.333509449" observedRunningTime="2025-12-02 17:31:14.470532333 +0000 UTC m=+2904.997421082" watchObservedRunningTime="2025-12-02 17:31:14.47497942 +0000 UTC m=+2905.001868179" Dec 02 17:31:14 crc kubenswrapper[4747]: I1202 17:31:14.504866 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q8nnz" podStartSLOduration=2.284474248 podStartE2EDuration="5.504838908s" podCreationTimestamp="2025-12-02 17:31:09 +0000 UTC" firstStartedPulling="2025-12-02 17:31:10.359933153 +0000 UTC m=+2900.886821902" lastFinishedPulling="2025-12-02 17:31:13.580297803 +0000 UTC m=+2904.107186562" observedRunningTime="2025-12-02 17:31:14.491395776 +0000 UTC m=+2905.018284555" watchObservedRunningTime="2025-12-02 17:31:14.504838908 +0000 UTC m=+2905.031727687" Dec 02 17:31:16 crc kubenswrapper[4747]: I1202 17:31:16.461514 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b85g2" event={"ID":"ca465c6a-13c8-4a37-b14c-568f4fba4cdb","Type":"ContainerStarted","Data":"f3a12915d534c9247d70449c5ea303424c088175ee286957c68446fc8d7dd32c"} Dec 02 17:31:17 crc kubenswrapper[4747]: I1202 17:31:17.475761 4747 generic.go:334] "Generic (PLEG): container finished" podID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerID="f3a12915d534c9247d70449c5ea303424c088175ee286957c68446fc8d7dd32c" exitCode=0 Dec 02 17:31:17 crc kubenswrapper[4747]: I1202 17:31:17.475822 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b85g2" event={"ID":"ca465c6a-13c8-4a37-b14c-568f4fba4cdb","Type":"ContainerDied","Data":"f3a12915d534c9247d70449c5ea303424c088175ee286957c68446fc8d7dd32c"} Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.287626 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.288191 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.344083 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.467830 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.468170 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.504023 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-b85g2" event={"ID":"ca465c6a-13c8-4a37-b14c-568f4fba4cdb","Type":"ContainerStarted","Data":"6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059"} Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.529351 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b85g2" podStartSLOduration=4.431323366 podStartE2EDuration="8.52933414s" podCreationTimestamp="2025-12-02 17:31:11 +0000 UTC" firstStartedPulling="2025-12-02 17:31:14.444055451 +0000 UTC m=+2904.970944250" lastFinishedPulling="2025-12-02 17:31:18.542066225 +0000 UTC m=+2909.068955024" observedRunningTime="2025-12-02 17:31:19.522692051 +0000 UTC m=+2910.049580800" watchObservedRunningTime="2025-12-02 17:31:19.52933414 +0000 UTC m=+2910.056222889" Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.535346 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:19 crc kubenswrapper[4747]: I1202 17:31:19.566614 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.054425 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.060896 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.068220 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.076653 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.076710 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gkdls" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.076928 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.078113 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.153233 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.153283 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-config-data\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.153376 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: 
\"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.255780 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.255883 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.256006 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.256113 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-config-data\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.257449 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-config-data\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.257487 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.257642 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.257804 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c7rt\" (UniqueName: \"kubernetes.io/projected/d7c276fe-92e7-4429-b6f8-d9488337b369-kube-api-access-6c7rt\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.257851 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " 
pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.257949 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.258004 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.263884 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.360224 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.360350 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c7rt\" (UniqueName: \"kubernetes.io/projected/d7c276fe-92e7-4429-b6f8-d9488337b369-kube-api-access-6c7rt\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.360392 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.360460 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.360533 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.360621 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.360844 4747 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.361250 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.365133 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.365983 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.368387 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.384201 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c7rt\" (UniqueName: \"kubernetes.io/projected/d7c276fe-92e7-4429-b6f8-d9488337b369-kube-api-access-6c7rt\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.401515 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"tempest-tests-tempest\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") " pod="openstack/tempest-tests-tempest" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.566658 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:20 crc kubenswrapper[4747]: I1202 17:31:20.697502 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 17:31:21 crc kubenswrapper[4747]: W1202 17:31:21.209324 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7c276fe_92e7_4429_b6f8_d9488337b369.slice/crio-e452edd614c119cfedea994f44a754d5bc5ab9b095e7e5dade526ecacc1a9b0b WatchSource:0}: Error finding container e452edd614c119cfedea994f44a754d5bc5ab9b095e7e5dade526ecacc1a9b0b: Status 404 returned error can't find the container with id e452edd614c119cfedea994f44a754d5bc5ab9b095e7e5dade526ecacc1a9b0b Dec 02 17:31:21 crc kubenswrapper[4747]: I1202 17:31:21.213019 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 17:31:21 crc kubenswrapper[4747]: I1202 17:31:21.336292 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6chq"] Dec 02 17:31:21 crc kubenswrapper[4747]: I1202 17:31:21.524035 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d7c276fe-92e7-4429-b6f8-d9488337b369","Type":"ContainerStarted","Data":"e452edd614c119cfedea994f44a754d5bc5ab9b095e7e5dade526ecacc1a9b0b"} Dec 02 17:31:21 crc kubenswrapper[4747]: I1202 17:31:21.524249 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x6chq" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="registry-server" containerID="cri-o://4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe" gracePeriod=2 Dec 02 17:31:21 crc kubenswrapper[4747]: I1202 17:31:21.868217 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:21 crc kubenswrapper[4747]: I1202 17:31:21.868561 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.032603 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.097195 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkrtt\" (UniqueName: \"kubernetes.io/projected/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-kube-api-access-xkrtt\") pod \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.097386 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-utilities\") pod \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.097535 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-catalog-content\") pod \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\" (UID: \"54bf01ed-0add-4d3a-93ad-2dc405e7a79d\") " Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.098129 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-utilities" (OuterVolumeSpecName: "utilities") pod "54bf01ed-0add-4d3a-93ad-2dc405e7a79d" (UID: "54bf01ed-0add-4d3a-93ad-2dc405e7a79d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.107147 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-kube-api-access-xkrtt" (OuterVolumeSpecName: "kube-api-access-xkrtt") pod "54bf01ed-0add-4d3a-93ad-2dc405e7a79d" (UID: "54bf01ed-0add-4d3a-93ad-2dc405e7a79d"). InnerVolumeSpecName "kube-api-access-xkrtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.151669 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "54bf01ed-0add-4d3a-93ad-2dc405e7a79d" (UID: "54bf01ed-0add-4d3a-93ad-2dc405e7a79d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.199476 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkrtt\" (UniqueName: \"kubernetes.io/projected/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-kube-api-access-xkrtt\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.199515 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.199525 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54bf01ed-0add-4d3a-93ad-2dc405e7a79d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.537666 4747 generic.go:334] "Generic (PLEG): container finished" podID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerID="4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe" exitCode=0 Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.537726 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6chq" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.537746 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6chq" event={"ID":"54bf01ed-0add-4d3a-93ad-2dc405e7a79d","Type":"ContainerDied","Data":"4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe"} Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.538255 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6chq" event={"ID":"54bf01ed-0add-4d3a-93ad-2dc405e7a79d","Type":"ContainerDied","Data":"550e1a07cce4653372546c288cfb20e955cce30a86f6b0dfb1601e05a62b1d14"} Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.538329 4747 scope.go:117] "RemoveContainer" containerID="4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.580453 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6chq"] Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.583136 4747 scope.go:117] "RemoveContainer" containerID="3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.588807 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x6chq"] Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.608633 4747 scope.go:117] "RemoveContainer" containerID="b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.660579 4747 scope.go:117] "RemoveContainer" containerID="4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe" Dec 02 17:31:22 crc kubenswrapper[4747]: E1202 17:31:22.661203 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe\": container with ID starting with 4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe not found: ID does not exist" containerID="4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.661266 
4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe"} err="failed to get container status \"4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe\": rpc error: code = NotFound desc = could not find container \"4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe\": container with ID starting with 4459ad11b6a8bb267adfc801b73267a4c8fd25630509cd059f74ece8dcf657fe not found: ID does not exist" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.661298 4747 scope.go:117] "RemoveContainer" containerID="3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5" Dec 02 17:31:22 crc kubenswrapper[4747]: E1202 17:31:22.661609 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5\": container with ID starting with 3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5 not found: ID does not exist" containerID="3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.661657 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5"} err="failed to get container status \"3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5\": rpc error: code = NotFound desc = could not find container \"3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5\": container with ID starting with 3425cddafee6d5099cc8e8de470e11e96b7d3ebc258c9c8e72f035685d363fc5 not found: ID does not exist" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.661689 4747 scope.go:117] "RemoveContainer" containerID="b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b" Dec 02 17:31:22 crc kubenswrapper[4747]: E1202 17:31:22.662007 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b\": container with ID starting with b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b not found: ID does not exist" containerID="b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.662039 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b"} err="failed to get container status \"b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b\": rpc error: code = NotFound desc = could not find container \"b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b\": container with ID starting with b5db183501bc72f4cc43a5b13e0253533b8a1246d36512cf158f5045f7425e7b not found: ID does not exist" Dec 02 17:31:22 crc kubenswrapper[4747]: I1202 17:31:22.930063 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b85g2" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="registry-server" probeResult="failure" output=< Dec 02 17:31:22 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Dec 02 17:31:22 crc kubenswrapper[4747]: > Dec 02 17:31:23 crc kubenswrapper[4747]: I1202 17:31:23.747194 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-q8nnz"] Dec 02 17:31:23 crc kubenswrapper[4747]: I1202 17:31:23.747758 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q8nnz" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="registry-server" containerID="cri-o://70cc5f9d736007ff06336fde577b70bd2617508688b9949d3f1ae232f3408e5c" gracePeriod=2 Dec 02 17:31:23 crc kubenswrapper[4747]: I1202 17:31:23.787765 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" path="/var/lib/kubelet/pods/54bf01ed-0add-4d3a-93ad-2dc405e7a79d/volumes" Dec 02 17:31:24 crc kubenswrapper[4747]: I1202 17:31:24.564018 4747 generic.go:334] "Generic (PLEG): container finished" podID="01816c39-7390-4439-b3df-59e76ccaebc9" containerID="70cc5f9d736007ff06336fde577b70bd2617508688b9949d3f1ae232f3408e5c" exitCode=0 Dec 02 17:31:24 crc kubenswrapper[4747]: I1202 17:31:24.564068 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8nnz" event={"ID":"01816c39-7390-4439-b3df-59e76ccaebc9","Type":"ContainerDied","Data":"70cc5f9d736007ff06336fde577b70bd2617508688b9949d3f1ae232f3408e5c"} Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.380110 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.431199 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-catalog-content\") pod \"01816c39-7390-4439-b3df-59e76ccaebc9\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.431337 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-utilities\") pod \"01816c39-7390-4439-b3df-59e76ccaebc9\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.431372 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htjk9\" (UniqueName: \"kubernetes.io/projected/01816c39-7390-4439-b3df-59e76ccaebc9-kube-api-access-htjk9\") pod \"01816c39-7390-4439-b3df-59e76ccaebc9\" (UID: \"01816c39-7390-4439-b3df-59e76ccaebc9\") " Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.432732 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-utilities" (OuterVolumeSpecName: "utilities") pod "01816c39-7390-4439-b3df-59e76ccaebc9" (UID: "01816c39-7390-4439-b3df-59e76ccaebc9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.438434 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.447512 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01816c39-7390-4439-b3df-59e76ccaebc9-kube-api-access-htjk9" (OuterVolumeSpecName: "kube-api-access-htjk9") pod "01816c39-7390-4439-b3df-59e76ccaebc9" (UID: "01816c39-7390-4439-b3df-59e76ccaebc9"). InnerVolumeSpecName "kube-api-access-htjk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.481014 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01816c39-7390-4439-b3df-59e76ccaebc9" (UID: "01816c39-7390-4439-b3df-59e76ccaebc9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.540401 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htjk9\" (UniqueName: \"kubernetes.io/projected/01816c39-7390-4439-b3df-59e76ccaebc9-kube-api-access-htjk9\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.540431 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01816c39-7390-4439-b3df-59e76ccaebc9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.615740 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q8nnz" event={"ID":"01816c39-7390-4439-b3df-59e76ccaebc9","Type":"ContainerDied","Data":"b5a946712d678276c8588299a7be161c60a08df51496fa5d3b7c8bc3cdd0a0b5"} Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.615801 4747 scope.go:117] "RemoveContainer" containerID="70cc5f9d736007ff06336fde577b70bd2617508688b9949d3f1ae232f3408e5c" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.615932 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q8nnz" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.660782 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q8nnz"] Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.667575 4747 scope.go:117] "RemoveContainer" containerID="6c207249f26d5fe73f94ef76b5cb59e000195d774cd19cd12bccb51c069ee987" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.670192 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q8nnz"] Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.688083 4747 scope.go:117] "RemoveContainer" containerID="9bedaf6f6ad67a74bbd93b4d34dbe70b455cff64ca08c6a806832868fbd7172d" Dec 02 17:31:27 crc kubenswrapper[4747]: I1202 17:31:27.769968 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" path="/var/lib/kubelet/pods/01816c39-7390-4439-b3df-59e76ccaebc9/volumes" Dec 02 17:31:31 crc kubenswrapper[4747]: I1202 17:31:31.795378 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:31:31 crc kubenswrapper[4747]: I1202 17:31:31.795779 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:31:31 crc kubenswrapper[4747]: I1202 17:31:31.795862 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:31:31 crc kubenswrapper[4747]: I1202 17:31:31.797125 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"301c861d5e42340432be248256a4d3834ff99a0200966e47edb34d095843e970"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:31:31 crc kubenswrapper[4747]: I1202 17:31:31.797236 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://301c861d5e42340432be248256a4d3834ff99a0200966e47edb34d095843e970" gracePeriod=600 Dec 02 17:31:31 crc kubenswrapper[4747]: I1202 17:31:31.923282 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:32 crc kubenswrapper[4747]: I1202 17:31:32.005349 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:32 crc kubenswrapper[4747]: I1202 17:31:32.165250 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b85g2"] Dec 02 17:31:33 crc kubenswrapper[4747]: I1202 17:31:33.679817 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b85g2" 
podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="registry-server" containerID="cri-o://6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" gracePeriod=2 Dec 02 17:31:34 crc kubenswrapper[4747]: I1202 17:31:34.690206 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="301c861d5e42340432be248256a4d3834ff99a0200966e47edb34d095843e970" exitCode=0 Dec 02 17:31:34 crc kubenswrapper[4747]: I1202 17:31:34.690243 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"301c861d5e42340432be248256a4d3834ff99a0200966e47edb34d095843e970"} Dec 02 17:31:34 crc kubenswrapper[4747]: I1202 17:31:34.690541 4747 scope.go:117] "RemoveContainer" containerID="2d5ac995fd2a64b34955e61d7dec0060325cc4b99c6756c15506a400da8e4c5d" Dec 02 17:31:36 crc kubenswrapper[4747]: I1202 17:31:36.811649 4747 generic.go:334] "Generic (PLEG): container finished" podID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" exitCode=0 Dec 02 17:31:36 crc kubenswrapper[4747]: I1202 17:31:36.811745 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b85g2" event={"ID":"ca465c6a-13c8-4a37-b14c-568f4fba4cdb","Type":"ContainerDied","Data":"6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059"} Dec 02 17:31:41 crc kubenswrapper[4747]: E1202 17:31:41.871063 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 17:31:41 crc kubenswrapper[4747]: E1202 17:31:41.873223 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 17:31:41 crc kubenswrapper[4747]: E1202 17:31:41.877371 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 17:31:41 crc kubenswrapper[4747]: E1202 17:31:41.877484 4747 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-b85g2" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="registry-server" Dec 02 17:31:51 crc kubenswrapper[4747]: E1202 17:31:51.868275 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 17:31:51 crc kubenswrapper[4747]: E1202 17:31:51.869649 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 17:31:51 crc kubenswrapper[4747]: E1202 17:31:51.870291 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 17:31:51 crc kubenswrapper[4747]: E1202 17:31:51.870345 4747 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-b85g2" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="registry-server" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.036156 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:57 crc kubenswrapper[4747]: E1202 17:31:57.051677 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 02 17:31:57 crc kubenswrapper[4747]: E1202 17:31:57.051853 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6c7rt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(d7c276fe-92e7-4429-b6f8-d9488337b369): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 17:31:57 crc kubenswrapper[4747]: E1202 17:31:57.053075 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="d7c276fe-92e7-4429-b6f8-d9488337b369" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.072033 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b85g2" event={"ID":"ca465c6a-13c8-4a37-b14c-568f4fba4cdb","Type":"ContainerDied","Data":"02da0337e7447e6f00254e8b2c0af713f916959665b6821a9aa672187220e7a9"} Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.072090 4747 scope.go:117] "RemoveContainer" containerID="6bec9128ca994f08cb4ece593b2af68502ed3145aa228792b9faaa01e320a059" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.072231 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b85g2" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.113368 4747 scope.go:117] "RemoveContainer" containerID="f3a12915d534c9247d70449c5ea303424c088175ee286957c68446fc8d7dd32c" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.145876 4747 scope.go:117] "RemoveContainer" containerID="24d33bba422ffeba1c710a2f00ca57be9797429f3f50c26d5cb3d252e2468e50" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.216978 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-catalog-content\") pod \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.217185 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-utilities\") pod \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.217337 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbqhd\" (UniqueName: \"kubernetes.io/projected/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-kube-api-access-zbqhd\") pod \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\" (UID: \"ca465c6a-13c8-4a37-b14c-568f4fba4cdb\") " Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.217946 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-utilities" (OuterVolumeSpecName: "utilities") pod "ca465c6a-13c8-4a37-b14c-568f4fba4cdb" (UID: "ca465c6a-13c8-4a37-b14c-568f4fba4cdb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.223465 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-kube-api-access-zbqhd" (OuterVolumeSpecName: "kube-api-access-zbqhd") pod "ca465c6a-13c8-4a37-b14c-568f4fba4cdb" (UID: "ca465c6a-13c8-4a37-b14c-568f4fba4cdb"). InnerVolumeSpecName "kube-api-access-zbqhd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.319557 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.319588 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbqhd\" (UniqueName: \"kubernetes.io/projected/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-kube-api-access-zbqhd\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.337255 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca465c6a-13c8-4a37-b14c-568f4fba4cdb" (UID: "ca465c6a-13c8-4a37-b14c-568f4fba4cdb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.406655 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b85g2"] Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.421111 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca465c6a-13c8-4a37-b14c-568f4fba4cdb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.424621 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b85g2"] Dec 02 17:31:57 crc kubenswrapper[4747]: I1202 17:31:57.773208 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" path="/var/lib/kubelet/pods/ca465c6a-13c8-4a37-b14c-568f4fba4cdb/volumes" Dec 02 17:31:58 crc kubenswrapper[4747]: I1202 17:31:58.114095 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112"} Dec 02 17:31:58 crc kubenswrapper[4747]: E1202 17:31:58.122387 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="d7c276fe-92e7-4429-b6f8-d9488337b369" Dec 02 17:32:12 crc kubenswrapper[4747]: I1202 17:32:12.173164 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 17:32:14 crc kubenswrapper[4747]: I1202 17:32:14.290101 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d7c276fe-92e7-4429-b6f8-d9488337b369","Type":"ContainerStarted","Data":"87c9d52fd2e40583b069c60ae37331e60e8e88313344c2e172a2bb930dcb701f"} Dec 02 17:32:14 crc kubenswrapper[4747]: I1202 17:32:14.328135 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=5.373659589 podStartE2EDuration="56.32811647s" podCreationTimestamp="2025-12-02 17:31:18 +0000 UTC" firstStartedPulling="2025-12-02 17:31:21.21504505 +0000 UTC m=+2911.741933799" lastFinishedPulling="2025-12-02 
17:32:12.169501911 +0000 UTC m=+2962.696390680" observedRunningTime="2025-12-02 17:32:14.320896275 +0000 UTC m=+2964.847785064" watchObservedRunningTime="2025-12-02 17:32:14.32811647 +0000 UTC m=+2964.855005219" Dec 02 17:34:01 crc kubenswrapper[4747]: I1202 17:34:01.795660 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:34:01 crc kubenswrapper[4747]: I1202 17:34:01.796475 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:34:31 crc kubenswrapper[4747]: I1202 17:34:31.794812 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:34:31 crc kubenswrapper[4747]: I1202 17:34:31.795517 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:35:01 crc kubenswrapper[4747]: I1202 17:35:01.795745 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:35:01 crc kubenswrapper[4747]: I1202 17:35:01.796701 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:35:01 crc kubenswrapper[4747]: I1202 17:35:01.798176 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:35:01 crc kubenswrapper[4747]: I1202 17:35:01.798851 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:35:01 crc kubenswrapper[4747]: I1202 17:35:01.798919 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" gracePeriod=600 Dec 02 17:35:01 crc kubenswrapper[4747]: E1202 17:35:01.923940 4747 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:35:02 crc kubenswrapper[4747]: I1202 17:35:02.146481 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" exitCode=0 Dec 02 17:35:02 crc kubenswrapper[4747]: I1202 17:35:02.146539 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112"} Dec 02 17:35:02 crc kubenswrapper[4747]: I1202 17:35:02.146583 4747 scope.go:117] "RemoveContainer" containerID="301c861d5e42340432be248256a4d3834ff99a0200966e47edb34d095843e970" Dec 02 17:35:02 crc kubenswrapper[4747]: I1202 17:35:02.147401 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:35:02 crc kubenswrapper[4747]: E1202 17:35:02.147801 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:35:14 crc kubenswrapper[4747]: I1202 17:35:14.760774 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:35:14 crc kubenswrapper[4747]: E1202 17:35:14.761746 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:35:25 crc kubenswrapper[4747]: I1202 17:35:25.760443 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:35:25 crc kubenswrapper[4747]: E1202 17:35:25.761733 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:35:40 crc kubenswrapper[4747]: I1202 17:35:40.760130 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:35:40 crc kubenswrapper[4747]: E1202 17:35:40.760971 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:35:54 crc kubenswrapper[4747]: I1202 17:35:54.762042 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:35:54 crc kubenswrapper[4747]: E1202 17:35:54.763336 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:36:06 crc kubenswrapper[4747]: I1202 17:36:06.761105 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:36:06 crc kubenswrapper[4747]: E1202 17:36:06.762303 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:36:20 crc kubenswrapper[4747]: I1202 17:36:20.760610 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:36:20 crc kubenswrapper[4747]: E1202 17:36:20.761689 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:36:33 crc kubenswrapper[4747]: I1202 17:36:33.760685 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:36:33 crc kubenswrapper[4747]: E1202 17:36:33.761880 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:36:44 crc kubenswrapper[4747]: I1202 17:36:44.761024 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:36:44 crc kubenswrapper[4747]: E1202 17:36:44.763648 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:36:58 crc kubenswrapper[4747]: I1202 17:36:58.761040 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:36:58 crc kubenswrapper[4747]: E1202 17:36:58.762083 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:37:11 crc kubenswrapper[4747]: I1202 17:37:11.762042 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:37:11 crc kubenswrapper[4747]: E1202 17:37:11.762793 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:37:22 crc kubenswrapper[4747]: I1202 17:37:22.760945 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:37:22 crc kubenswrapper[4747]: E1202 17:37:22.761776 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.726590 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bb955"] Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728024 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="extract-content" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728053 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="extract-content" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728077 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="extract-utilities" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728088 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="extract-utilities" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728121 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728133 4747 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728152 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728162 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728177 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="extract-content" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728187 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="extract-content" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728215 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="extract-utilities" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728225 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="extract-utilities" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728237 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728246 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728268 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="extract-content" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728278 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="extract-content" Dec 02 17:37:28 crc kubenswrapper[4747]: E1202 17:37:28.728293 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="extract-utilities" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728302 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="extract-utilities" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728630 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="54bf01ed-0add-4d3a-93ad-2dc405e7a79d" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728675 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="01816c39-7390-4439-b3df-59e76ccaebc9" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.728693 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca465c6a-13c8-4a37-b14c-568f4fba4cdb" containerName="registry-server" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.730753 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.745657 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bb955"] Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.864255 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-catalog-content\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.864364 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-utilities\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.864551 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9spnb\" (UniqueName: \"kubernetes.io/projected/23c93ea4-767a-4618-9d0f-8d339d2987de-kube-api-access-9spnb\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.965984 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9spnb\" (UniqueName: \"kubernetes.io/projected/23c93ea4-767a-4618-9d0f-8d339d2987de-kube-api-access-9spnb\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.966088 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-catalog-content\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.966162 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-utilities\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.966852 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-utilities\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.967463 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-catalog-content\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:28 crc kubenswrapper[4747]: I1202 17:37:28.995101 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9spnb\" (UniqueName: \"kubernetes.io/projected/23c93ea4-767a-4618-9d0f-8d339d2987de-kube-api-access-9spnb\") pod \"redhat-marketplace-bb955\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:29 crc kubenswrapper[4747]: I1202 17:37:29.060788 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:29 crc kubenswrapper[4747]: W1202 17:37:29.594714 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23c93ea4_767a_4618_9d0f_8d339d2987de.slice/crio-cc959ec40e099884e04c10c68150f473584193abb8f1cb2847f895a5edb148e8 WatchSource:0}: Error finding container cc959ec40e099884e04c10c68150f473584193abb8f1cb2847f895a5edb148e8: Status 404 returned error can't find the container with id cc959ec40e099884e04c10c68150f473584193abb8f1cb2847f895a5edb148e8 Dec 02 17:37:29 crc kubenswrapper[4747]: I1202 17:37:29.597136 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bb955"] Dec 02 17:37:29 crc kubenswrapper[4747]: I1202 17:37:29.892422 4747 generic.go:334] "Generic (PLEG): container finished" podID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerID="d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6" exitCode=0 Dec 02 17:37:29 crc kubenswrapper[4747]: I1202 17:37:29.892590 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bb955" event={"ID":"23c93ea4-767a-4618-9d0f-8d339d2987de","Type":"ContainerDied","Data":"d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6"} Dec 02 17:37:29 crc kubenswrapper[4747]: I1202 17:37:29.892705 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bb955" event={"ID":"23c93ea4-767a-4618-9d0f-8d339d2987de","Type":"ContainerStarted","Data":"cc959ec40e099884e04c10c68150f473584193abb8f1cb2847f895a5edb148e8"} Dec 02 17:37:29 crc kubenswrapper[4747]: I1202 17:37:29.894243 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:37:30 crc kubenswrapper[4747]: I1202 17:37:30.913350 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bb955" event={"ID":"23c93ea4-767a-4618-9d0f-8d339d2987de","Type":"ContainerStarted","Data":"7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26"} Dec 02 17:37:31 crc kubenswrapper[4747]: I1202 17:37:31.925607 4747 generic.go:334] "Generic (PLEG): container finished" podID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerID="7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26" exitCode=0 Dec 02 17:37:31 crc kubenswrapper[4747]: I1202 17:37:31.925678 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bb955" event={"ID":"23c93ea4-767a-4618-9d0f-8d339d2987de","Type":"ContainerDied","Data":"7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26"} Dec 02 17:37:33 crc kubenswrapper[4747]: I1202 17:37:33.947662 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bb955" event={"ID":"23c93ea4-767a-4618-9d0f-8d339d2987de","Type":"ContainerStarted","Data":"3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291"} Dec 02 17:37:33 crc kubenswrapper[4747]: I1202 17:37:33.972002 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bb955" podStartSLOduration=2.989136438 podStartE2EDuration="5.971983869s" podCreationTimestamp="2025-12-02 17:37:28 +0000 UTC" firstStartedPulling="2025-12-02 17:37:29.894034013 +0000 UTC m=+3280.420922752" lastFinishedPulling="2025-12-02 17:37:32.876881434 +0000 UTC m=+3283.403770183" observedRunningTime="2025-12-02 17:37:33.967578013 +0000 UTC m=+3284.494466762" watchObservedRunningTime="2025-12-02 17:37:33.971983869 +0000 UTC m=+3284.498872618" Dec 02 17:37:36 crc kubenswrapper[4747]: I1202 17:37:36.760779 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:37:36 crc kubenswrapper[4747]: E1202 17:37:36.762643 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:37:39 crc kubenswrapper[4747]: I1202 17:37:39.060975 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:39 crc kubenswrapper[4747]: I1202 17:37:39.061523 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:39 crc kubenswrapper[4747]: I1202 17:37:39.166677 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:40 crc kubenswrapper[4747]: I1202 17:37:40.077036 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:40 crc kubenswrapper[4747]: I1202 17:37:40.139800 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bb955"] Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.051376 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bb955" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="registry-server" containerID="cri-o://3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291" gracePeriod=2 Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.629332 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.760188 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-catalog-content\") pod \"23c93ea4-767a-4618-9d0f-8d339d2987de\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.760346 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9spnb\" (UniqueName: \"kubernetes.io/projected/23c93ea4-767a-4618-9d0f-8d339d2987de-kube-api-access-9spnb\") pod \"23c93ea4-767a-4618-9d0f-8d339d2987de\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.760373 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-utilities\") pod \"23c93ea4-767a-4618-9d0f-8d339d2987de\" (UID: \"23c93ea4-767a-4618-9d0f-8d339d2987de\") " Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.761785 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-utilities" (OuterVolumeSpecName: "utilities") pod "23c93ea4-767a-4618-9d0f-8d339d2987de" (UID: "23c93ea4-767a-4618-9d0f-8d339d2987de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.766559 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23c93ea4-767a-4618-9d0f-8d339d2987de-kube-api-access-9spnb" (OuterVolumeSpecName: "kube-api-access-9spnb") pod "23c93ea4-767a-4618-9d0f-8d339d2987de" (UID: "23c93ea4-767a-4618-9d0f-8d339d2987de"). InnerVolumeSpecName "kube-api-access-9spnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.779978 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23c93ea4-767a-4618-9d0f-8d339d2987de" (UID: "23c93ea4-767a-4618-9d0f-8d339d2987de"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.862707 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.863074 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9spnb\" (UniqueName: \"kubernetes.io/projected/23c93ea4-767a-4618-9d0f-8d339d2987de-kube-api-access-9spnb\") on node \"crc\" DevicePath \"\"" Dec 02 17:37:42 crc kubenswrapper[4747]: I1202 17:37:42.863171 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23c93ea4-767a-4618-9d0f-8d339d2987de-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.064582 4747 generic.go:334] "Generic (PLEG): container finished" podID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerID="3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291" exitCode=0 Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.064656 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bb955" event={"ID":"23c93ea4-767a-4618-9d0f-8d339d2987de","Type":"ContainerDied","Data":"3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291"} Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.064660 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bb955" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.064719 4747 scope.go:117] "RemoveContainer" containerID="3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.064702 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bb955" event={"ID":"23c93ea4-767a-4618-9d0f-8d339d2987de","Type":"ContainerDied","Data":"cc959ec40e099884e04c10c68150f473584193abb8f1cb2847f895a5edb148e8"} Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.087930 4747 scope.go:117] "RemoveContainer" containerID="7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.124352 4747 scope.go:117] "RemoveContainer" containerID="d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.127975 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bb955"] Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.144358 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bb955"] Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.163218 4747 scope.go:117] "RemoveContainer" containerID="3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291" Dec 02 17:37:43 crc kubenswrapper[4747]: E1202 17:37:43.163626 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291\": container with ID starting with 3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291 not found: ID does not exist" containerID="3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.163667 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291"} err="failed to get container status \"3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291\": rpc error: code = NotFound desc = could not find container \"3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291\": container with ID starting with 3dc0e28cb6e5eeecf3c3211b559915c9e69732acbe2a3c398301dc9c99a2f291 not found: ID does not exist" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.163695 4747 scope.go:117] "RemoveContainer" containerID="7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26" Dec 02 17:37:43 crc kubenswrapper[4747]: E1202 17:37:43.164008 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26\": container with ID starting with 7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26 not found: ID does not exist" containerID="7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.164049 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26"} err="failed to get container status \"7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26\": rpc error: code = NotFound desc = could not find container \"7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26\": container with ID starting with 7f96339e3727a509aff47d006d04ea313316b57b39c65295043c71a330211b26 not found: ID does not exist" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.164076 4747 scope.go:117] "RemoveContainer" containerID="d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6" Dec 02 17:37:43 crc kubenswrapper[4747]: E1202 17:37:43.164359 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6\": container with ID starting with d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6 not found: ID does not exist" containerID="d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.164387 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6"} err="failed to get container status \"d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6\": rpc error: code = NotFound desc = could not find container \"d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6\": container with ID starting with d98c8d2000ce86fad7e2b127f3a94c7fc9c45987a831efb747a7d3222d92ffc6 not found: ID does not exist" Dec 02 17:37:43 crc kubenswrapper[4747]: I1202 17:37:43.780568 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" path="/var/lib/kubelet/pods/23c93ea4-767a-4618-9d0f-8d339d2987de/volumes" Dec 02 17:37:51 crc kubenswrapper[4747]: I1202 17:37:51.761811 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:37:51 crc kubenswrapper[4747]: E1202 17:37:51.762585 4747 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:38:06 crc kubenswrapper[4747]: I1202 17:38:06.761155 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:38:06 crc kubenswrapper[4747]: E1202 17:38:06.762329 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:38:21 crc kubenswrapper[4747]: I1202 17:38:21.761395 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:38:21 crc kubenswrapper[4747]: E1202 17:38:21.762477 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:38:35 crc kubenswrapper[4747]: I1202 17:38:35.760252 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:38:35 crc kubenswrapper[4747]: E1202 17:38:35.761096 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:38:48 crc kubenswrapper[4747]: I1202 17:38:48.760326 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:38:48 crc kubenswrapper[4747]: E1202 17:38:48.761232 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:39:00 crc kubenswrapper[4747]: I1202 17:39:00.761613 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:39:00 crc kubenswrapper[4747]: E1202 17:39:00.762334 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:39:14 crc kubenswrapper[4747]: I1202 17:39:14.761088 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:39:14 crc kubenswrapper[4747]: E1202 17:39:14.761724 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:39:29 crc kubenswrapper[4747]: I1202 17:39:29.766291 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:39:29 crc kubenswrapper[4747]: E1202 17:39:29.767528 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:39:42 crc kubenswrapper[4747]: I1202 17:39:42.761246 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:39:42 crc kubenswrapper[4747]: E1202 17:39:42.762285 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:39:53 crc kubenswrapper[4747]: I1202 17:39:53.762344 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:39:53 crc kubenswrapper[4747]: E1202 17:39:53.763741 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 17:40:08 crc kubenswrapper[4747]: I1202 17:40:08.762111 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112" Dec 02 17:40:09 crc kubenswrapper[4747]: I1202 17:40:09.672434 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"4088f7e38f3f9699e8568820b04c28ef358afd21592a256df06f5bb3fcc89d9d"} Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.542563 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-kql79"] Dec 02 17:42:23 crc kubenswrapper[4747]: E1202 17:42:23.543596 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="extract-utilities" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.543610 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="extract-utilities" Dec 02 17:42:23 crc kubenswrapper[4747]: E1202 17:42:23.543634 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="registry-server" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.543640 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="registry-server" Dec 02 17:42:23 crc kubenswrapper[4747]: E1202 17:42:23.543648 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="extract-content" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.543654 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="extract-content" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.543839 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="23c93ea4-767a-4618-9d0f-8d339d2987de" containerName="registry-server" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.545382 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.587277 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nkf9\" (UniqueName: \"kubernetes.io/projected/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-kube-api-access-2nkf9\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.587425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-utilities\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.587527 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-catalog-content\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.614261 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kql79"] Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.689102 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nkf9\" (UniqueName: \"kubernetes.io/projected/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-kube-api-access-2nkf9\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.689153 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-utilities\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.689184 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-catalog-content\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.689656 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-catalog-content\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.690161 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-utilities\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.717982 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nkf9\" (UniqueName: \"kubernetes.io/projected/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-kube-api-access-2nkf9\") pod \"certified-operators-kql79\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:23 crc kubenswrapper[4747]: I1202 17:42:23.902289 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:24 crc kubenswrapper[4747]: I1202 17:42:24.435517 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kql79"] Dec 02 17:42:25 crc kubenswrapper[4747]: I1202 17:42:25.262260 4747 generic.go:334] "Generic (PLEG): container finished" podID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerID="da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287" exitCode=0 Dec 02 17:42:25 crc kubenswrapper[4747]: I1202 17:42:25.262362 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kql79" event={"ID":"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b","Type":"ContainerDied","Data":"da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287"} Dec 02 17:42:25 crc kubenswrapper[4747]: I1202 17:42:25.262573 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kql79" event={"ID":"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b","Type":"ContainerStarted","Data":"74d1e75f43864c124ce547af5127936da81f2953893888306fb405e588bbf831"} Dec 02 17:42:27 crc kubenswrapper[4747]: I1202 17:42:27.283047 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kql79" event={"ID":"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b","Type":"ContainerStarted","Data":"d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0"} Dec 02 17:42:28 crc kubenswrapper[4747]: I1202 17:42:28.298314 4747 generic.go:334] "Generic (PLEG): container finished" podID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerID="d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0" exitCode=0 Dec 02 17:42:28 crc kubenswrapper[4747]: I1202 17:42:28.298461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kql79" event={"ID":"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b","Type":"ContainerDied","Data":"d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0"} Dec 02 17:42:29 crc kubenswrapper[4747]: I1202 17:42:29.314370 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kql79" event={"ID":"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b","Type":"ContainerStarted","Data":"e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e"} Dec 02 17:42:29 crc kubenswrapper[4747]: I1202 17:42:29.348372 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kql79" podStartSLOduration=2.752570082 podStartE2EDuration="6.34834292s" podCreationTimestamp="2025-12-02 17:42:23 +0000 UTC" firstStartedPulling="2025-12-02 17:42:25.264739705 +0000 UTC m=+3575.791628454" lastFinishedPulling="2025-12-02 17:42:28.860512513 +0000 UTC m=+3579.387401292" observedRunningTime="2025-12-02 17:42:29.336695558 +0000 UTC m=+3579.863584307" watchObservedRunningTime="2025-12-02 17:42:29.34834292 +0000 UTC m=+3579.875231679" Dec 02 17:42:31 crc kubenswrapper[4747]: I1202 17:42:31.795647 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:42:31 crc kubenswrapper[4747]: I1202 17:42:31.797021 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" 
podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:42:33 crc kubenswrapper[4747]: I1202 17:42:33.903287 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:33 crc kubenswrapper[4747]: I1202 17:42:33.904833 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:33 crc kubenswrapper[4747]: I1202 17:42:33.973385 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:34 crc kubenswrapper[4747]: I1202 17:42:34.483145 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:34 crc kubenswrapper[4747]: I1202 17:42:34.540830 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kql79"] Dec 02 17:42:36 crc kubenswrapper[4747]: I1202 17:42:36.416230 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kql79" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerName="registry-server" containerID="cri-o://e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e" gracePeriod=2 Dec 02 17:42:36 crc kubenswrapper[4747]: I1202 17:42:36.944943 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.064599 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nkf9\" (UniqueName: \"kubernetes.io/projected/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-kube-api-access-2nkf9\") pod \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.065016 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-utilities\") pod \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.065064 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-catalog-content\") pod \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\" (UID: \"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b\") " Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.065843 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-utilities" (OuterVolumeSpecName: "utilities") pod "fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" (UID: "fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.072391 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-kube-api-access-2nkf9" (OuterVolumeSpecName: "kube-api-access-2nkf9") pod "fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" (UID: "fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b"). InnerVolumeSpecName "kube-api-access-2nkf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.145885 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" (UID: "fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.167623 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.167841 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.167859 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nkf9\" (UniqueName: \"kubernetes.io/projected/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b-kube-api-access-2nkf9\") on node \"crc\" DevicePath \"\"" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.433471 4747 generic.go:334] "Generic (PLEG): container finished" podID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerID="e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e" exitCode=0 Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.433510 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kql79" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.433523 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kql79" event={"ID":"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b","Type":"ContainerDied","Data":"e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e"} Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.433628 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kql79" event={"ID":"fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b","Type":"ContainerDied","Data":"74d1e75f43864c124ce547af5127936da81f2953893888306fb405e588bbf831"} Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.433674 4747 scope.go:117] "RemoveContainer" containerID="e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.473471 4747 scope.go:117] "RemoveContainer" containerID="d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.473601 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kql79"] Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.487158 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kql79"] Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.507054 4747 scope.go:117] "RemoveContainer" containerID="da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.570915 4747 scope.go:117] "RemoveContainer" containerID="e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e" Dec 02 17:42:37 crc kubenswrapper[4747]: E1202 17:42:37.571484 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e\": container with ID starting with e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e not found: ID does not exist" containerID="e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.571524 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e"} err="failed to get container status \"e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e\": rpc error: code = NotFound desc = could not find container \"e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e\": container with ID starting with e798712d1dd9f2db1ab25289b1412274240720f9828f9ef87455e4525dfc1c3e not found: ID does not exist" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.571549 4747 scope.go:117] "RemoveContainer" containerID="d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0" Dec 02 17:42:37 crc kubenswrapper[4747]: E1202 17:42:37.571893 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0\": container with ID starting with d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0 not found: ID does not exist" containerID="d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.571925 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0"} err="failed to get container status \"d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0\": rpc error: code = NotFound desc = could not find container \"d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0\": container with ID starting with d9f834e75d11cc90c12f86394611cef4d733aae27bf1382fb5731f2781645ab0 not found: ID does not exist" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.571940 4747 scope.go:117] "RemoveContainer" containerID="da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287" Dec 02 17:42:37 crc kubenswrapper[4747]: E1202 17:42:37.572241 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287\": container with ID starting with da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287 not found: ID does not exist" containerID="da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.572277 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287"} err="failed to get container status \"da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287\": rpc error: code = NotFound desc = could not find container \"da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287\": container with ID starting with da662fd81b69dd22b12094fb091a0d2374e7d65353ed00fff461e24d67733287 not found: ID does not exist" Dec 02 17:42:37 crc kubenswrapper[4747]: I1202 17:42:37.777586 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" path="/var/lib/kubelet/pods/fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b/volumes" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.364390 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fv7zw"] Dec 02 17:43:01 crc kubenswrapper[4747]: E1202 17:43:01.365423 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerName="registry-server" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.365437 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerName="registry-server" Dec 02 17:43:01 crc kubenswrapper[4747]: E1202 17:43:01.365456 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerName="extract-utilities" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.365464 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerName="extract-utilities" Dec 02 17:43:01 crc kubenswrapper[4747]: E1202 17:43:01.365477 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerName="extract-content" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.365484 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" containerName="extract-content" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.365705 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb47e155-a9ee-4aa5-90e9-0c96f99b1b0b" 
containerName="registry-server" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.367138 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.375104 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fv7zw"] Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.492134 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-catalog-content\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.492229 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-utilities\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.492258 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhlqg\" (UniqueName: \"kubernetes.io/projected/215e39ba-d1d9-4ca5-8b00-0efe5003c392-kube-api-access-hhlqg\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.594230 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-catalog-content\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.594431 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-utilities\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.594468 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhlqg\" (UniqueName: \"kubernetes.io/projected/215e39ba-d1d9-4ca5-8b00-0efe5003c392-kube-api-access-hhlqg\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.595022 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-catalog-content\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.595178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-utilities\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 
02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.615618 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhlqg\" (UniqueName: \"kubernetes.io/projected/215e39ba-d1d9-4ca5-8b00-0efe5003c392-kube-api-access-hhlqg\") pod \"redhat-operators-fv7zw\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") " pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.687427 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.795118 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:43:01 crc kubenswrapper[4747]: I1202 17:43:01.795175 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:43:02 crc kubenswrapper[4747]: I1202 17:43:02.179782 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fv7zw"] Dec 02 17:43:02 crc kubenswrapper[4747]: W1202 17:43:02.196364 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod215e39ba_d1d9_4ca5_8b00_0efe5003c392.slice/crio-3249524adbaf4e3caa24a84d220d6232ac65a2aae9918aac1961d9a930da2495 WatchSource:0}: Error finding container 3249524adbaf4e3caa24a84d220d6232ac65a2aae9918aac1961d9a930da2495: Status 404 returned error can't find the container with id 3249524adbaf4e3caa24a84d220d6232ac65a2aae9918aac1961d9a930da2495 Dec 02 17:43:02 crc kubenswrapper[4747]: I1202 17:43:02.749748 4747 generic.go:334] "Generic (PLEG): container finished" podID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerID="cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af" exitCode=0 Dec 02 17:43:02 crc kubenswrapper[4747]: I1202 17:43:02.749801 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fv7zw" event={"ID":"215e39ba-d1d9-4ca5-8b00-0efe5003c392","Type":"ContainerDied","Data":"cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af"} Dec 02 17:43:02 crc kubenswrapper[4747]: I1202 17:43:02.749830 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fv7zw" event={"ID":"215e39ba-d1d9-4ca5-8b00-0efe5003c392","Type":"ContainerStarted","Data":"3249524adbaf4e3caa24a84d220d6232ac65a2aae9918aac1961d9a930da2495"} Dec 02 17:43:02 crc kubenswrapper[4747]: I1202 17:43:02.752216 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:43:03 crc kubenswrapper[4747]: I1202 17:43:03.779770 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fv7zw" event={"ID":"215e39ba-d1d9-4ca5-8b00-0efe5003c392","Type":"ContainerStarted","Data":"c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f"} Dec 02 17:43:06 crc kubenswrapper[4747]: E1202 17:43:06.088011 4747 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod215e39ba_d1d9_4ca5_8b00_0efe5003c392.slice/crio-c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f.scope\": RecentStats: unable to find data in memory cache]" Dec 02 17:43:06 crc kubenswrapper[4747]: I1202 17:43:06.793208 4747 generic.go:334] "Generic (PLEG): container finished" podID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerID="c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f" exitCode=0 Dec 02 17:43:06 crc kubenswrapper[4747]: I1202 17:43:06.793247 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fv7zw" event={"ID":"215e39ba-d1d9-4ca5-8b00-0efe5003c392","Type":"ContainerDied","Data":"c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f"} Dec 02 17:43:07 crc kubenswrapper[4747]: I1202 17:43:07.818720 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fv7zw" event={"ID":"215e39ba-d1d9-4ca5-8b00-0efe5003c392","Type":"ContainerStarted","Data":"4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf"} Dec 02 17:43:07 crc kubenswrapper[4747]: I1202 17:43:07.845096 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fv7zw" podStartSLOduration=2.40961028 podStartE2EDuration="6.845070149s" podCreationTimestamp="2025-12-02 17:43:01 +0000 UTC" firstStartedPulling="2025-12-02 17:43:02.751961997 +0000 UTC m=+3613.278850746" lastFinishedPulling="2025-12-02 17:43:07.187421865 +0000 UTC m=+3617.714310615" observedRunningTime="2025-12-02 17:43:07.83984337 +0000 UTC m=+3618.366732129" watchObservedRunningTime="2025-12-02 17:43:07.845070149 +0000 UTC m=+3618.371958918" Dec 02 17:43:11 crc kubenswrapper[4747]: I1202 17:43:11.688281 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:11 crc kubenswrapper[4747]: I1202 17:43:11.688786 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:12 crc kubenswrapper[4747]: I1202 17:43:12.749781 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fv7zw" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="registry-server" probeResult="failure" output=< Dec 02 17:43:12 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Dec 02 17:43:12 crc kubenswrapper[4747]: > Dec 02 17:43:21 crc kubenswrapper[4747]: I1202 17:43:21.732376 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:21 crc kubenswrapper[4747]: I1202 17:43:21.795945 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fv7zw" Dec 02 17:43:21 crc kubenswrapper[4747]: I1202 17:43:21.980355 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fv7zw"] Dec 02 17:43:22 crc kubenswrapper[4747]: I1202 17:43:22.985042 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fv7zw" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="registry-server" containerID="cri-o://4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf" gracePeriod=2 Dec 02 17:43:23 crc 
kubenswrapper[4747]: I1202 17:43:23.546370 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fv7zw"
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.635953 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-utilities\") pod \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") "
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.636117 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhlqg\" (UniqueName: \"kubernetes.io/projected/215e39ba-d1d9-4ca5-8b00-0efe5003c392-kube-api-access-hhlqg\") pod \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") "
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.636208 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-catalog-content\") pod \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\" (UID: \"215e39ba-d1d9-4ca5-8b00-0efe5003c392\") "
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.636784 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-utilities" (OuterVolumeSpecName: "utilities") pod "215e39ba-d1d9-4ca5-8b00-0efe5003c392" (UID: "215e39ba-d1d9-4ca5-8b00-0efe5003c392"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.637052 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-utilities\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.649166 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/215e39ba-d1d9-4ca5-8b00-0efe5003c392-kube-api-access-hhlqg" (OuterVolumeSpecName: "kube-api-access-hhlqg") pod "215e39ba-d1d9-4ca5-8b00-0efe5003c392" (UID: "215e39ba-d1d9-4ca5-8b00-0efe5003c392"). InnerVolumeSpecName "kube-api-access-hhlqg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.738547 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhlqg\" (UniqueName: \"kubernetes.io/projected/215e39ba-d1d9-4ca5-8b00-0efe5003c392-kube-api-access-hhlqg\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.760753 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "215e39ba-d1d9-4ca5-8b00-0efe5003c392" (UID: "215e39ba-d1d9-4ca5-8b00-0efe5003c392"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:43:23 crc kubenswrapper[4747]: I1202 17:43:23.841500 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215e39ba-d1d9-4ca5-8b00-0efe5003c392-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.005895 4747 generic.go:334] "Generic (PLEG): container finished" podID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerID="4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf" exitCode=0
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.005962 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fv7zw" event={"ID":"215e39ba-d1d9-4ca5-8b00-0efe5003c392","Type":"ContainerDied","Data":"4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf"}
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.006004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fv7zw" event={"ID":"215e39ba-d1d9-4ca5-8b00-0efe5003c392","Type":"ContainerDied","Data":"3249524adbaf4e3caa24a84d220d6232ac65a2aae9918aac1961d9a930da2495"}
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.006027 4747 scope.go:117] "RemoveContainer" containerID="4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.006047 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fv7zw"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.055767 4747 scope.go:117] "RemoveContainer" containerID="c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.056697 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fv7zw"]
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.065305 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fv7zw"]
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.098103 4747 scope.go:117] "RemoveContainer" containerID="cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.142784 4747 scope.go:117] "RemoveContainer" containerID="4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf"
Dec 02 17:43:24 crc kubenswrapper[4747]: E1202 17:43:24.143200 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf\": container with ID starting with 4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf not found: ID does not exist" containerID="4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.143230 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf"} err="failed to get container status \"4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf\": rpc error: code = NotFound desc = could not find container \"4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf\": container with ID starting with 4935542e6784dadf8620578574855ecd56b471be1b83a6a3c2ec3e6f23a16bcf not found: ID does not exist"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.143250 4747 scope.go:117] "RemoveContainer" containerID="c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f"
Dec 02 17:43:24 crc kubenswrapper[4747]: E1202 17:43:24.143897 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f\": container with ID starting with c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f not found: ID does not exist" containerID="c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.144000 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f"} err="failed to get container status \"c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f\": rpc error: code = NotFound desc = could not find container \"c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f\": container with ID starting with c297e19e6281412112b05f32d326ee6cdf1f913ba3f139bdf3460776ad84283f not found: ID does not exist"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.144033 4747 scope.go:117] "RemoveContainer" containerID="cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af"
Dec 02 17:43:24 crc kubenswrapper[4747]: E1202 17:43:24.144589 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af\": container with ID starting with cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af not found: ID does not exist" containerID="cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af"
Dec 02 17:43:24 crc kubenswrapper[4747]: I1202 17:43:24.144616 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af"} err="failed to get container status \"cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af\": rpc error: code = NotFound desc = could not find container \"cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af\": container with ID starting with cb918bb8ec6a473b595f75c882be65958e7222bdaeccabcd63edf1696d79c0af not found: ID does not exist"
Dec 02 17:43:25 crc kubenswrapper[4747]: I1202 17:43:25.780120 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" path="/var/lib/kubelet/pods/215e39ba-d1d9-4ca5-8b00-0efe5003c392/volumes"
Dec 02 17:43:31 crc kubenswrapper[4747]: I1202 17:43:31.810224 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:43:31 crc kubenswrapper[4747]: I1202 17:43:31.811007 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:43:31 crc kubenswrapper[4747]: I1202 17:43:31.825174 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc"
Dec 02 17:43:31 crc kubenswrapper[4747]: I1202 17:43:31.826125 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4088f7e38f3f9699e8568820b04c28ef358afd21592a256df06f5bb3fcc89d9d"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 17:43:31 crc kubenswrapper[4747]: I1202 17:43:31.826221 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://4088f7e38f3f9699e8568820b04c28ef358afd21592a256df06f5bb3fcc89d9d" gracePeriod=600
Dec 02 17:43:32 crc kubenswrapper[4747]: I1202 17:43:32.119163 4747 generic.go:334] "Generic (PLEG): container finished" podID="d7c276fe-92e7-4429-b6f8-d9488337b369" containerID="87c9d52fd2e40583b069c60ae37331e60e8e88313344c2e172a2bb930dcb701f" exitCode=0
Dec 02 17:43:32 crc kubenswrapper[4747]: I1202 17:43:32.119567 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d7c276fe-92e7-4429-b6f8-d9488337b369","Type":"ContainerDied","Data":"87c9d52fd2e40583b069c60ae37331e60e8e88313344c2e172a2bb930dcb701f"}
Dec 02 17:43:32 crc kubenswrapper[4747]: I1202 17:43:32.127227 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="4088f7e38f3f9699e8568820b04c28ef358afd21592a256df06f5bb3fcc89d9d" exitCode=0
Dec 02 17:43:32 crc kubenswrapper[4747]: I1202 17:43:32.127270 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"4088f7e38f3f9699e8568820b04c28ef358afd21592a256df06f5bb3fcc89d9d"}
Dec 02 17:43:32 crc kubenswrapper[4747]: I1202 17:43:32.127300 4747 scope.go:117] "RemoveContainer" containerID="b14f485af2930c7e224d216c92c5bd7d4042a4eaf11337993a5cce0f4edb9112"
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.145216 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"}
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.664215 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.753237 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-temporary\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.753466 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ssh-key\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.754212 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.754998 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.755037 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6c7rt\" (UniqueName: \"kubernetes.io/projected/d7c276fe-92e7-4429-b6f8-d9488337b369-kube-api-access-6c7rt\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.755067 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ca-certs\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.755122 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.755142 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config-secret\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.755168 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-config-data\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.755185 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-workdir\") pod \"d7c276fe-92e7-4429-b6f8-d9488337b369\" (UID: \"d7c276fe-92e7-4429-b6f8-d9488337b369\") "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.755729 4747 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.759675 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-config-data" (OuterVolumeSpecName: "config-data") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.774363 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.812084 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "test-operator-logs") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.812209 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7c276fe-92e7-4429-b6f8-d9488337b369-kube-api-access-6c7rt" (OuterVolumeSpecName: "kube-api-access-6c7rt") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "kube-api-access-6c7rt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.824440 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.836094 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.854161 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.858659 4747 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.858708 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.858719 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6c7rt\" (UniqueName: \"kubernetes.io/projected/d7c276fe-92e7-4429-b6f8-d9488337b369-kube-api-access-6c7rt\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.858728 4747 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-ca-certs\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.858737 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.858744 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.858753 4747 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d7c276fe-92e7-4429-b6f8-d9488337b369-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.891451 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d7c276fe-92e7-4429-b6f8-d9488337b369" (UID: "d7c276fe-92e7-4429-b6f8-d9488337b369"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.891914 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.960018 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:33 crc kubenswrapper[4747]: I1202 17:43:33.960306 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d7c276fe-92e7-4429-b6f8-d9488337b369-openstack-config\") on node \"crc\" DevicePath \"\""
Dec 02 17:43:34 crc kubenswrapper[4747]: I1202 17:43:34.156980 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 02 17:43:34 crc kubenswrapper[4747]: I1202 17:43:34.157236 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d7c276fe-92e7-4429-b6f8-d9488337b369","Type":"ContainerDied","Data":"e452edd614c119cfedea994f44a754d5bc5ab9b095e7e5dade526ecacc1a9b0b"}
Dec 02 17:43:34 crc kubenswrapper[4747]: I1202 17:43:34.157277 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e452edd614c119cfedea994f44a754d5bc5ab9b095e7e5dade526ecacc1a9b0b"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.717599 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 02 17:43:42 crc kubenswrapper[4747]: E1202 17:43:42.718454 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7c276fe-92e7-4429-b6f8-d9488337b369" containerName="tempest-tests-tempest-tests-runner"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.718482 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7c276fe-92e7-4429-b6f8-d9488337b369" containerName="tempest-tests-tempest-tests-runner"
Dec 02 17:43:42 crc kubenswrapper[4747]: E1202 17:43:42.718513 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="registry-server"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.718519 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="registry-server"
Dec 02 17:43:42 crc kubenswrapper[4747]: E1202 17:43:42.718549 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="extract-content"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.718557 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="extract-content"
Dec 02 17:43:42 crc kubenswrapper[4747]: E1202 17:43:42.718570 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="extract-utilities"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.718578 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="extract-utilities"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.718756 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7c276fe-92e7-4429-b6f8-d9488337b369" containerName="tempest-tests-tempest-tests-runner"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.718779 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="215e39ba-d1d9-4ca5-8b00-0efe5003c392" containerName="registry-server"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.719404 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.721632 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gkdls"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.738972 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.887632 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d1805265-c562-4f13-85ad-330c827b78d6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.887996 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dsvk\" (UniqueName: \"kubernetes.io/projected/d1805265-c562-4f13-85ad-330c827b78d6-kube-api-access-4dsvk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d1805265-c562-4f13-85ad-330c827b78d6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.990886 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d1805265-c562-4f13-85ad-330c827b78d6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.991715 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dsvk\" (UniqueName: \"kubernetes.io/projected/d1805265-c562-4f13-85ad-330c827b78d6-kube-api-access-4dsvk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d1805265-c562-4f13-85ad-330c827b78d6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:42 crc kubenswrapper[4747]: I1202 17:43:42.991362 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d1805265-c562-4f13-85ad-330c827b78d6\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:43 crc kubenswrapper[4747]: I1202 17:43:43.016776 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dsvk\" (UniqueName: \"kubernetes.io/projected/d1805265-c562-4f13-85ad-330c827b78d6-kube-api-access-4dsvk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d1805265-c562-4f13-85ad-330c827b78d6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:43 crc kubenswrapper[4747]: I1202 17:43:43.063573 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"d1805265-c562-4f13-85ad-330c827b78d6\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:43 crc kubenswrapper[4747]: I1202 17:43:43.352847 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 02 17:43:43 crc kubenswrapper[4747]: I1202 17:43:43.833095 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 02 17:43:44 crc kubenswrapper[4747]: I1202 17:43:44.264010 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"d1805265-c562-4f13-85ad-330c827b78d6","Type":"ContainerStarted","Data":"f36d4445a3b9fe19f23a698cd86f9ce34f9637e3e255e15e6970c0fe48291f23"}
Dec 02 17:43:45 crc kubenswrapper[4747]: I1202 17:43:45.273676 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"d1805265-c562-4f13-85ad-330c827b78d6","Type":"ContainerStarted","Data":"5ad5d9d09dbed02be3e22e48d9bbfa911f12402e0d0b43b80d6ea709af31d2ad"}
Dec 02 17:43:45 crc kubenswrapper[4747]: I1202 17:43:45.294060 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.491259163 podStartE2EDuration="3.294041191s" podCreationTimestamp="2025-12-02 17:43:42 +0000 UTC" firstStartedPulling="2025-12-02 17:43:43.84416667 +0000 UTC m=+3654.371055419" lastFinishedPulling="2025-12-02 17:43:44.646948698 +0000 UTC m=+3655.173837447" observedRunningTime="2025-12-02 17:43:45.292125247 +0000 UTC m=+3655.819014086" watchObservedRunningTime="2025-12-02 17:43:45.294041191 +0000 UTC m=+3655.820929940"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.640353 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dhk6s/must-gather-zvw8c"]
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.642271 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.644890 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dhk6s"/"kube-root-ca.crt"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.644938 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dhk6s"/"openshift-service-ca.crt"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.645278 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-dhk6s"/"default-dockercfg-r4pkd"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.650384 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dhk6s/must-gather-zvw8c"]
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.712264 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qw47\" (UniqueName: \"kubernetes.io/projected/a5a3b242-68b3-4f35-ad72-a9d91e543720-kube-api-access-9qw47\") pod \"must-gather-zvw8c\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") " pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.712317 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a5a3b242-68b3-4f35-ad72-a9d91e543720-must-gather-output\") pod \"must-gather-zvw8c\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") " pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.813842 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qw47\" (UniqueName: \"kubernetes.io/projected/a5a3b242-68b3-4f35-ad72-a9d91e543720-kube-api-access-9qw47\") pod \"must-gather-zvw8c\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") " pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.813901 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a5a3b242-68b3-4f35-ad72-a9d91e543720-must-gather-output\") pod \"must-gather-zvw8c\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") " pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.814492 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a5a3b242-68b3-4f35-ad72-a9d91e543720-must-gather-output\") pod \"must-gather-zvw8c\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") " pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.832061 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qw47\" (UniqueName: \"kubernetes.io/projected/a5a3b242-68b3-4f35-ad72-a9d91e543720-kube-api-access-9qw47\") pod \"must-gather-zvw8c\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") " pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:09 crc kubenswrapper[4747]: I1202 17:44:09.960212 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:44:10 crc kubenswrapper[4747]: I1202 17:44:10.476142 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dhk6s/must-gather-zvw8c"]
Dec 02 17:44:10 crc kubenswrapper[4747]: I1202 17:44:10.563716 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/must-gather-zvw8c" event={"ID":"a5a3b242-68b3-4f35-ad72-a9d91e543720","Type":"ContainerStarted","Data":"c62f62577ac0532939888c5c41b1775fe7bd21212c4c74f2cfacdb336da4daf7"}
Dec 02 17:44:15 crc kubenswrapper[4747]: I1202 17:44:15.652325 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/must-gather-zvw8c" event={"ID":"a5a3b242-68b3-4f35-ad72-a9d91e543720","Type":"ContainerStarted","Data":"d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4"}
Dec 02 17:44:15 crc kubenswrapper[4747]: I1202 17:44:15.652899 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/must-gather-zvw8c" event={"ID":"a5a3b242-68b3-4f35-ad72-a9d91e543720","Type":"ContainerStarted","Data":"566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1"}
Dec 02 17:44:15 crc kubenswrapper[4747]: I1202 17:44:15.690851 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dhk6s/must-gather-zvw8c" podStartSLOduration=2.723805745 podStartE2EDuration="6.690825098s" podCreationTimestamp="2025-12-02 17:44:09 +0000 UTC" firstStartedPulling="2025-12-02 17:44:10.472976023 +0000 UTC m=+3680.999864772" lastFinishedPulling="2025-12-02 17:44:14.439995376 +0000 UTC m=+3684.966884125" observedRunningTime="2025-12-02 17:44:15.681102471 +0000 UTC m=+3686.207991270" watchObservedRunningTime="2025-12-02 17:44:15.690825098 +0000 UTC m=+3686.217713877"
Dec 02 17:44:17 crc kubenswrapper[4747]: E1202 17:44:17.565108 4747 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.201:54756->38.102.83.201:33503: read tcp 38.102.83.201:54756->38.102.83.201:33503: read: connection reset by peer
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.297049 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-4zhkh"]
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.298854 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.497119 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjqfs\" (UniqueName: \"kubernetes.io/projected/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-kube-api-access-hjqfs\") pod \"crc-debug-4zhkh\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") " pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.497600 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-host\") pod \"crc-debug-4zhkh\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") " pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.599191 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjqfs\" (UniqueName: \"kubernetes.io/projected/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-kube-api-access-hjqfs\") pod \"crc-debug-4zhkh\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") " pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.599272 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-host\") pod \"crc-debug-4zhkh\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") " pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.599378 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-host\") pod \"crc-debug-4zhkh\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") " pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.626838 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjqfs\" (UniqueName: \"kubernetes.io/projected/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-kube-api-access-hjqfs\") pod \"crc-debug-4zhkh\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") " pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: I1202 17:44:18.914745 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:44:18 crc kubenswrapper[4747]: W1202 17:44:18.949101 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b57bedd_e9b5_4b15_81a8_74f84c0dac19.slice/crio-f9d8fc9ddda68c90bd39d344c585848a55169f11bd00b58751132a6284bb619d WatchSource:0}: Error finding container f9d8fc9ddda68c90bd39d344c585848a55169f11bd00b58751132a6284bb619d: Status 404 returned error can't find the container with id f9d8fc9ddda68c90bd39d344c585848a55169f11bd00b58751132a6284bb619d
Dec 02 17:44:19 crc kubenswrapper[4747]: I1202 17:44:19.688825 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh" event={"ID":"9b57bedd-e9b5-4b15-81a8-74f84c0dac19","Type":"ContainerStarted","Data":"f9d8fc9ddda68c90bd39d344c585848a55169f11bd00b58751132a6284bb619d"}
Dec 02 17:44:31 crc kubenswrapper[4747]: I1202 17:44:31.805856 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh" event={"ID":"9b57bedd-e9b5-4b15-81a8-74f84c0dac19","Type":"ContainerStarted","Data":"684a93af0d730e7e1d2c2f13f6bfcaee39bdd0afec8584c633d4330a15cabdf4"}
Dec 02 17:44:31 crc kubenswrapper[4747]: I1202 17:44:31.825736 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh" podStartSLOduration=2.083799095 podStartE2EDuration="13.825715102s" podCreationTimestamp="2025-12-02 17:44:18 +0000 UTC" firstStartedPulling="2025-12-02 17:44:18.95113004 +0000 UTC m=+3689.478018789" lastFinishedPulling="2025-12-02 17:44:30.693046047 +0000 UTC m=+3701.219934796" observedRunningTime="2025-12-02 17:44:31.819501525 +0000 UTC m=+3702.346390274" watchObservedRunningTime="2025-12-02 17:44:31.825715102 +0000 UTC m=+3702.352603851"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.183736 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"]
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.185855 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.189811 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.198186 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.200484 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"]
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.229257 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwzwq\" (UniqueName: \"kubernetes.io/projected/86c39a00-e8ff-43ab-8b4c-7924f786483c-kube-api-access-fwzwq\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.229344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86c39a00-e8ff-43ab-8b4c-7924f786483c-config-volume\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.229415 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86c39a00-e8ff-43ab-8b4c-7924f786483c-secret-volume\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.331647 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86c39a00-e8ff-43ab-8b4c-7924f786483c-secret-volume\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.331746 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwzwq\" (UniqueName: \"kubernetes.io/projected/86c39a00-e8ff-43ab-8b4c-7924f786483c-kube-api-access-fwzwq\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.331806 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86c39a00-e8ff-43ab-8b4c-7924f786483c-config-volume\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.332668 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86c39a00-e8ff-43ab-8b4c-7924f786483c-config-volume\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.348095 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86c39a00-e8ff-43ab-8b4c-7924f786483c-secret-volume\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.350265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwzwq\" (UniqueName: \"kubernetes.io/projected/86c39a00-e8ff-43ab-8b4c-7924f786483c-kube-api-access-fwzwq\") pod \"collect-profiles-29411625-hvwx9\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:00 crc kubenswrapper[4747]: I1202 17:45:00.512378 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:01 crc kubenswrapper[4747]: I1202 17:45:01.081764 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"]
Dec 02 17:45:02 crc kubenswrapper[4747]: I1202 17:45:02.100402 4747 generic.go:334] "Generic (PLEG): container finished" podID="86c39a00-e8ff-43ab-8b4c-7924f786483c" containerID="c0959b3e136caf148456760bff83c22de186c89703e7627847b1db6aa6ef7e0e" exitCode=0
Dec 02 17:45:02 crc kubenswrapper[4747]: I1202 17:45:02.101069 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9" event={"ID":"86c39a00-e8ff-43ab-8b4c-7924f786483c","Type":"ContainerDied","Data":"c0959b3e136caf148456760bff83c22de186c89703e7627847b1db6aa6ef7e0e"}
Dec 02 17:45:02 crc kubenswrapper[4747]: I1202 17:45:02.102407 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9" event={"ID":"86c39a00-e8ff-43ab-8b4c-7924f786483c","Type":"ContainerStarted","Data":"033de928a7fe222eb76d23e451002b368afac9155fafdb466061f4e2635aa2d4"}
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.441133 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.501454 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwzwq\" (UniqueName: \"kubernetes.io/projected/86c39a00-e8ff-43ab-8b4c-7924f786483c-kube-api-access-fwzwq\") pod \"86c39a00-e8ff-43ab-8b4c-7924f786483c\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") "
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.501625 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86c39a00-e8ff-43ab-8b4c-7924f786483c-secret-volume\") pod \"86c39a00-e8ff-43ab-8b4c-7924f786483c\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") "
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.501765 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86c39a00-e8ff-43ab-8b4c-7924f786483c-config-volume\") pod \"86c39a00-e8ff-43ab-8b4c-7924f786483c\" (UID: \"86c39a00-e8ff-43ab-8b4c-7924f786483c\") "
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.502515 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86c39a00-e8ff-43ab-8b4c-7924f786483c-config-volume" (OuterVolumeSpecName: "config-volume") pod "86c39a00-e8ff-43ab-8b4c-7924f786483c" (UID: "86c39a00-e8ff-43ab-8b4c-7924f786483c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.508169 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86c39a00-e8ff-43ab-8b4c-7924f786483c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "86c39a00-e8ff-43ab-8b4c-7924f786483c" (UID: "86c39a00-e8ff-43ab-8b4c-7924f786483c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.516289 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86c39a00-e8ff-43ab-8b4c-7924f786483c-kube-api-access-fwzwq" (OuterVolumeSpecName: "kube-api-access-fwzwq") pod "86c39a00-e8ff-43ab-8b4c-7924f786483c" (UID: "86c39a00-e8ff-43ab-8b4c-7924f786483c"). InnerVolumeSpecName "kube-api-access-fwzwq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.605111 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/86c39a00-e8ff-43ab-8b4c-7924f786483c-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.605157 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/86c39a00-e8ff-43ab-8b4c-7924f786483c-config-volume\") on node \"crc\" DevicePath \"\""
Dec 02 17:45:03 crc kubenswrapper[4747]: I1202 17:45:03.605172 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwzwq\" (UniqueName: \"kubernetes.io/projected/86c39a00-e8ff-43ab-8b4c-7924f786483c-kube-api-access-fwzwq\") on node \"crc\" DevicePath \"\""
Dec 02 17:45:04 crc kubenswrapper[4747]: I1202 17:45:04.121361 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9" event={"ID":"86c39a00-e8ff-43ab-8b4c-7924f786483c","Type":"ContainerDied","Data":"033de928a7fe222eb76d23e451002b368afac9155fafdb466061f4e2635aa2d4"}
Dec 02 17:45:04 crc kubenswrapper[4747]: I1202 17:45:04.121406 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411625-hvwx9"
Dec 02 17:45:04 crc kubenswrapper[4747]: I1202 17:45:04.121406 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="033de928a7fe222eb76d23e451002b368afac9155fafdb466061f4e2635aa2d4"
Dec 02 17:45:04 crc kubenswrapper[4747]: I1202 17:45:04.513855 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"]
Dec 02 17:45:04 crc kubenswrapper[4747]: I1202 17:45:04.527366 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411580-9fczk"]
Dec 02 17:45:05 crc kubenswrapper[4747]: I1202 17:45:05.787309 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63ec13db-3dc2-442f-80ce-17fd41c3600b" path="/var/lib/kubelet/pods/63ec13db-3dc2-442f-80ce-17fd41c3600b/volumes"
Dec 02 17:45:08 crc kubenswrapper[4747]: I1202 17:45:08.166417 4747 generic.go:334] "Generic (PLEG): container finished" podID="9b57bedd-e9b5-4b15-81a8-74f84c0dac19" containerID="684a93af0d730e7e1d2c2f13f6bfcaee39bdd0afec8584c633d4330a15cabdf4" exitCode=0
Dec 02 17:45:08 crc kubenswrapper[4747]: I1202 17:45:08.166461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh" event={"ID":"9b57bedd-e9b5-4b15-81a8-74f84c0dac19","Type":"ContainerDied","Data":"684a93af0d730e7e1d2c2f13f6bfcaee39bdd0afec8584c633d4330a15cabdf4"}
Dec 02 17:45:08 crc kubenswrapper[4747]: I1202 17:45:08.832875 4747 scope.go:117] "RemoveContainer" containerID="56ab241443be764e8e83c8af3146b5a848140131a2693d3d0538f7febec8b29e"
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.309930 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.328796 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-host\") pod \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") "
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.329143 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-host" (OuterVolumeSpecName: "host") pod "9b57bedd-e9b5-4b15-81a8-74f84c0dac19" (UID: "9b57bedd-e9b5-4b15-81a8-74f84c0dac19"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.329747 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-host\") on node \"crc\" DevicePath \"\""
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.356608 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-4zhkh"]
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.367551 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-4zhkh"]
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.430736 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjqfs\" (UniqueName: \"kubernetes.io/projected/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-kube-api-access-hjqfs\") pod \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\" (UID: \"9b57bedd-e9b5-4b15-81a8-74f84c0dac19\") "
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.441148 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-kube-api-access-hjqfs" (OuterVolumeSpecName: "kube-api-access-hjqfs") pod "9b57bedd-e9b5-4b15-81a8-74f84c0dac19" (UID: "9b57bedd-e9b5-4b15-81a8-74f84c0dac19"). InnerVolumeSpecName "kube-api-access-hjqfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.532250 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjqfs\" (UniqueName: \"kubernetes.io/projected/9b57bedd-e9b5-4b15-81a8-74f84c0dac19-kube-api-access-hjqfs\") on node \"crc\" DevicePath \"\""
Dec 02 17:45:09 crc kubenswrapper[4747]: I1202 17:45:09.779750 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b57bedd-e9b5-4b15-81a8-74f84c0dac19" path="/var/lib/kubelet/pods/9b57bedd-e9b5-4b15-81a8-74f84c0dac19/volumes"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.190756 4747 scope.go:117] "RemoveContainer" containerID="684a93af0d730e7e1d2c2f13f6bfcaee39bdd0afec8584c633d4330a15cabdf4"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.190834 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-4zhkh"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.551055 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-84xps"]
Dec 02 17:45:10 crc kubenswrapper[4747]: E1202 17:45:10.551644 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86c39a00-e8ff-43ab-8b4c-7924f786483c" containerName="collect-profiles"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.551656 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="86c39a00-e8ff-43ab-8b4c-7924f786483c" containerName="collect-profiles"
Dec 02 17:45:10 crc kubenswrapper[4747]: E1202 17:45:10.551684 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b57bedd-e9b5-4b15-81a8-74f84c0dac19" containerName="container-00"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.551689 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b57bedd-e9b5-4b15-81a8-74f84c0dac19" containerName="container-00"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.551870 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b57bedd-e9b5-4b15-81a8-74f84c0dac19" containerName="container-00"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.551891 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="86c39a00-e8ff-43ab-8b4c-7924f786483c" containerName="collect-profiles"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.552600 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.555114 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a17b035-d3d6-4b4d-adb2-560938adfc9a-host\") pod \"crc-debug-84xps\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") " pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.555324 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz2rv\" (UniqueName: \"kubernetes.io/projected/7a17b035-d3d6-4b4d-adb2-560938adfc9a-kube-api-access-kz2rv\") pod \"crc-debug-84xps\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") " pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.656700 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz2rv\" (UniqueName: \"kubernetes.io/projected/7a17b035-d3d6-4b4d-adb2-560938adfc9a-kube-api-access-kz2rv\") pod \"crc-debug-84xps\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") " pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.656787 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a17b035-d3d6-4b4d-adb2-560938adfc9a-host\") pod \"crc-debug-84xps\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") " pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.657069 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a17b035-d3d6-4b4d-adb2-560938adfc9a-host\") pod \"crc-debug-84xps\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") " pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.692556 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz2rv\" (UniqueName: \"kubernetes.io/projected/7a17b035-d3d6-4b4d-adb2-560938adfc9a-kube-api-access-kz2rv\") pod \"crc-debug-84xps\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") " pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: I1202 17:45:10.874469 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:10 crc kubenswrapper[4747]: W1202 17:45:10.922521 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a17b035_d3d6_4b4d_adb2_560938adfc9a.slice/crio-e61c9b0bd3afd850f842137f3a51b9aee266dc44dbdee973f615ca7d37cced80 WatchSource:0}: Error finding container e61c9b0bd3afd850f842137f3a51b9aee266dc44dbdee973f615ca7d37cced80: Status 404 returned error can't find the container with id e61c9b0bd3afd850f842137f3a51b9aee266dc44dbdee973f615ca7d37cced80
Dec 02 17:45:11 crc kubenswrapper[4747]: I1202 17:45:11.203191 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-84xps" event={"ID":"7a17b035-d3d6-4b4d-adb2-560938adfc9a","Type":"ContainerStarted","Data":"b8bfe1aed56ac32f73aa0eb72e02267a750fa124fe6249c4c22b89c7adbbb781"}
Dec 02 17:45:11 crc kubenswrapper[4747]: I1202 17:45:11.203261 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-84xps" event={"ID":"7a17b035-d3d6-4b4d-adb2-560938adfc9a","Type":"ContainerStarted","Data":"e61c9b0bd3afd850f842137f3a51b9aee266dc44dbdee973f615ca7d37cced80"}
Dec 02 17:45:11 crc kubenswrapper[4747]: I1202 17:45:11.228526 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dhk6s/crc-debug-84xps" podStartSLOduration=1.228508451 podStartE2EDuration="1.228508451s" podCreationTimestamp="2025-12-02 17:45:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:45:11.22498617 +0000 UTC m=+3741.751874959" watchObservedRunningTime="2025-12-02 17:45:11.228508451 +0000 UTC m=+3741.755397200"
Dec 02 17:45:12 crc kubenswrapper[4747]: I1202 17:45:12.216295 4747 generic.go:334] "Generic (PLEG): container finished" podID="7a17b035-d3d6-4b4d-adb2-560938adfc9a" containerID="b8bfe1aed56ac32f73aa0eb72e02267a750fa124fe6249c4c22b89c7adbbb781" exitCode=0
Dec 02 17:45:12 crc kubenswrapper[4747]: I1202 17:45:12.216357 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-84xps" event={"ID":"7a17b035-d3d6-4b4d-adb2-560938adfc9a","Type":"ContainerDied","Data":"b8bfe1aed56ac32f73aa0eb72e02267a750fa124fe6249c4c22b89c7adbbb781"}
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.356968 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.408012 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a17b035-d3d6-4b4d-adb2-560938adfc9a-host\") pod \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") "
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.408111 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7a17b035-d3d6-4b4d-adb2-560938adfc9a-host" (OuterVolumeSpecName: "host") pod "7a17b035-d3d6-4b4d-adb2-560938adfc9a" (UID: "7a17b035-d3d6-4b4d-adb2-560938adfc9a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.408185 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz2rv\" (UniqueName: \"kubernetes.io/projected/7a17b035-d3d6-4b4d-adb2-560938adfc9a-kube-api-access-kz2rv\") pod \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\" (UID: \"7a17b035-d3d6-4b4d-adb2-560938adfc9a\") "
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.409085 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7a17b035-d3d6-4b4d-adb2-560938adfc9a-host\") on node \"crc\" DevicePath \"\""
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.413203 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-84xps"]
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.417230 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a17b035-d3d6-4b4d-adb2-560938adfc9a-kube-api-access-kz2rv" (OuterVolumeSpecName: "kube-api-access-kz2rv") pod "7a17b035-d3d6-4b4d-adb2-560938adfc9a" (UID: "7a17b035-d3d6-4b4d-adb2-560938adfc9a"). InnerVolumeSpecName "kube-api-access-kz2rv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.431463 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-84xps"]
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.511490 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz2rv\" (UniqueName: \"kubernetes.io/projected/7a17b035-d3d6-4b4d-adb2-560938adfc9a-kube-api-access-kz2rv\") on node \"crc\" DevicePath \"\""
Dec 02 17:45:13 crc kubenswrapper[4747]: I1202 17:45:13.779313 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a17b035-d3d6-4b4d-adb2-560938adfc9a" path="/var/lib/kubelet/pods/7a17b035-d3d6-4b4d-adb2-560938adfc9a/volumes"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.243856 4747 scope.go:117] "RemoveContainer" containerID="b8bfe1aed56ac32f73aa0eb72e02267a750fa124fe6249c4c22b89c7adbbb781"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.243973 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-84xps"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.617629 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-kfnsc"]
Dec 02 17:45:14 crc kubenswrapper[4747]: E1202 17:45:14.618367 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a17b035-d3d6-4b4d-adb2-560938adfc9a" containerName="container-00"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.618382 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a17b035-d3d6-4b4d-adb2-560938adfc9a" containerName="container-00"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.618611 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a17b035-d3d6-4b4d-adb2-560938adfc9a" containerName="container-00"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.619404 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.737793 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6447f82-9f12-462d-816b-b245c0b7ba19-host\") pod \"crc-debug-kfnsc\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.737875 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h7q5\" (UniqueName: \"kubernetes.io/projected/e6447f82-9f12-462d-816b-b245c0b7ba19-kube-api-access-5h7q5\") pod \"crc-debug-kfnsc\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.840318 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6447f82-9f12-462d-816b-b245c0b7ba19-host\") pod \"crc-debug-kfnsc\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.840406 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h7q5\" (UniqueName: \"kubernetes.io/projected/e6447f82-9f12-462d-816b-b245c0b7ba19-kube-api-access-5h7q5\") pod \"crc-debug-kfnsc\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.840597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6447f82-9f12-462d-816b-b245c0b7ba19-host\") pod \"crc-debug-kfnsc\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.878560 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h7q5\" (UniqueName: \"kubernetes.io/projected/e6447f82-9f12-462d-816b-b245c0b7ba19-kube-api-access-5h7q5\") pod \"crc-debug-kfnsc\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: I1202 17:45:14.941810 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc"
Dec 02 17:45:14 crc kubenswrapper[4747]: W1202 17:45:14.999442 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6447f82_9f12_462d_816b_b245c0b7ba19.slice/crio-5fa7b898da490186267582bba6538055999c5b28a82207091061d60c9fd535c4 WatchSource:0}: Error finding container 5fa7b898da490186267582bba6538055999c5b28a82207091061d60c9fd535c4: Status 404 returned error can't find the container with id 5fa7b898da490186267582bba6538055999c5b28a82207091061d60c9fd535c4
Dec 02 17:45:15 crc kubenswrapper[4747]: I1202 17:45:15.255412 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc" event={"ID":"e6447f82-9f12-462d-816b-b245c0b7ba19","Type":"ContainerStarted","Data":"54d658c23743f333a84dc5451df5a495ddec39856ce2b9eb3f6ae2490f0727d5"}
Dec 02 17:45:15 crc kubenswrapper[4747]: I1202 17:45:15.255459 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc" event={"ID":"e6447f82-9f12-462d-816b-b245c0b7ba19","Type":"ContainerStarted","Data":"5fa7b898da490186267582bba6538055999c5b28a82207091061d60c9fd535c4"}
Dec 02 17:45:15 crc kubenswrapper[4747]: I1202 17:45:15.273723 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc" podStartSLOduration=1.2737048610000001 podStartE2EDuration="1.273704861s" podCreationTimestamp="2025-12-02 17:45:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:45:15.267052802 +0000 UTC m=+3745.793941551" watchObservedRunningTime="2025-12-02 17:45:15.273704861 +0000 UTC m=+3745.800593610"
Dec 02 17:45:16 crc kubenswrapper[4747]: I1202 17:45:16.274119 4747 generic.go:334] "Generic (PLEG): container finished" podID="e6447f82-9f12-462d-816b-b245c0b7ba19" containerID="54d658c23743f333a84dc5451df5a495ddec39856ce2b9eb3f6ae2490f0727d5" exitCode=0
Dec 02 17:45:16 crc kubenswrapper[4747]: I1202 17:45:16.274252 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc" event={"ID":"e6447f82-9f12-462d-816b-b245c0b7ba19","Type":"ContainerDied","Data":"54d658c23743f333a84dc5451df5a495ddec39856ce2b9eb3f6ae2490f0727d5"}
Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.410758 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc" Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.444193 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-kfnsc"] Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.458262 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dhk6s/crc-debug-kfnsc"] Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.600481 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5h7q5\" (UniqueName: \"kubernetes.io/projected/e6447f82-9f12-462d-816b-b245c0b7ba19-kube-api-access-5h7q5\") pod \"e6447f82-9f12-462d-816b-b245c0b7ba19\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.600800 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6447f82-9f12-462d-816b-b245c0b7ba19-host\") pod \"e6447f82-9f12-462d-816b-b245c0b7ba19\" (UID: \"e6447f82-9f12-462d-816b-b245c0b7ba19\") " Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.601313 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6447f82-9f12-462d-816b-b245c0b7ba19-host" (OuterVolumeSpecName: "host") pod "e6447f82-9f12-462d-816b-b245c0b7ba19" (UID: "e6447f82-9f12-462d-816b-b245c0b7ba19"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.621699 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6447f82-9f12-462d-816b-b245c0b7ba19-kube-api-access-5h7q5" (OuterVolumeSpecName: "kube-api-access-5h7q5") pod "e6447f82-9f12-462d-816b-b245c0b7ba19" (UID: "e6447f82-9f12-462d-816b-b245c0b7ba19"). InnerVolumeSpecName "kube-api-access-5h7q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.703512 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5h7q5\" (UniqueName: \"kubernetes.io/projected/e6447f82-9f12-462d-816b-b245c0b7ba19-kube-api-access-5h7q5\") on node \"crc\" DevicePath \"\"" Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.703588 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6447f82-9f12-462d-816b-b245c0b7ba19-host\") on node \"crc\" DevicePath \"\"" Dec 02 17:45:17 crc kubenswrapper[4747]: I1202 17:45:17.777231 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6447f82-9f12-462d-816b-b245c0b7ba19" path="/var/lib/kubelet/pods/e6447f82-9f12-462d-816b-b245c0b7ba19/volumes" Dec 02 17:45:18 crc kubenswrapper[4747]: I1202 17:45:18.300298 4747 scope.go:117] "RemoveContainer" containerID="54d658c23743f333a84dc5451df5a495ddec39856ce2b9eb3f6ae2490f0727d5" Dec 02 17:45:18 crc kubenswrapper[4747]: I1202 17:45:18.300364 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dhk6s/crc-debug-kfnsc" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.226766 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zxzm5"] Dec 02 17:45:19 crc kubenswrapper[4747]: E1202 17:45:19.228332 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6447f82-9f12-462d-816b-b245c0b7ba19" containerName="container-00" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.228420 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6447f82-9f12-462d-816b-b245c0b7ba19" containerName="container-00" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.228646 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6447f82-9f12-462d-816b-b245c0b7ba19" containerName="container-00" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.230057 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.237820 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zxzm5"] Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.336205 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-utilities\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.336303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7jw4\" (UniqueName: \"kubernetes.io/projected/2774cfc7-266d-4770-bc9d-7d22c87402a8-kube-api-access-v7jw4\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.336459 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-catalog-content\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.437874 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-utilities\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.437979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7jw4\" (UniqueName: \"kubernetes.io/projected/2774cfc7-266d-4770-bc9d-7d22c87402a8-kube-api-access-v7jw4\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.438114 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-catalog-content\") pod \"community-operators-zxzm5\" (UID: 
\"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.438369 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-utilities\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.438637 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-catalog-content\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.455151 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7jw4\" (UniqueName: \"kubernetes.io/projected/2774cfc7-266d-4770-bc9d-7d22c87402a8-kube-api-access-v7jw4\") pod \"community-operators-zxzm5\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:19 crc kubenswrapper[4747]: I1202 17:45:19.558618 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:20 crc kubenswrapper[4747]: I1202 17:45:20.117204 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zxzm5"] Dec 02 17:45:20 crc kubenswrapper[4747]: W1202 17:45:20.121265 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2774cfc7_266d_4770_bc9d_7d22c87402a8.slice/crio-c8f85a675475c996687c9e438ba18134e4df4d7ce60055b3d0c132927a916718 WatchSource:0}: Error finding container c8f85a675475c996687c9e438ba18134e4df4d7ce60055b3d0c132927a916718: Status 404 returned error can't find the container with id c8f85a675475c996687c9e438ba18134e4df4d7ce60055b3d0c132927a916718 Dec 02 17:45:20 crc kubenswrapper[4747]: I1202 17:45:20.318244 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zxzm5" event={"ID":"2774cfc7-266d-4770-bc9d-7d22c87402a8","Type":"ContainerStarted","Data":"c8f85a675475c996687c9e438ba18134e4df4d7ce60055b3d0c132927a916718"} Dec 02 17:45:21 crc kubenswrapper[4747]: I1202 17:45:21.328389 4747 generic.go:334] "Generic (PLEG): container finished" podID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerID="aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8" exitCode=0 Dec 02 17:45:21 crc kubenswrapper[4747]: I1202 17:45:21.328440 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zxzm5" event={"ID":"2774cfc7-266d-4770-bc9d-7d22c87402a8","Type":"ContainerDied","Data":"aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8"} Dec 02 17:45:22 crc kubenswrapper[4747]: I1202 17:45:22.337858 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zxzm5" event={"ID":"2774cfc7-266d-4770-bc9d-7d22c87402a8","Type":"ContainerStarted","Data":"f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5"} Dec 02 17:45:23 crc kubenswrapper[4747]: I1202 17:45:23.348538 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerID="f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5" exitCode=0 Dec 02 17:45:23 crc kubenswrapper[4747]: I1202 17:45:23.348602 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zxzm5" event={"ID":"2774cfc7-266d-4770-bc9d-7d22c87402a8","Type":"ContainerDied","Data":"f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5"} Dec 02 17:45:24 crc kubenswrapper[4747]: I1202 17:45:24.361531 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zxzm5" event={"ID":"2774cfc7-266d-4770-bc9d-7d22c87402a8","Type":"ContainerStarted","Data":"390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004"} Dec 02 17:45:24 crc kubenswrapper[4747]: I1202 17:45:24.379825 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zxzm5" podStartSLOduration=2.894945072 podStartE2EDuration="5.379811006s" podCreationTimestamp="2025-12-02 17:45:19 +0000 UTC" firstStartedPulling="2025-12-02 17:45:21.330516614 +0000 UTC m=+3751.857405363" lastFinishedPulling="2025-12-02 17:45:23.815382548 +0000 UTC m=+3754.342271297" observedRunningTime="2025-12-02 17:45:24.377952023 +0000 UTC m=+3754.904840772" watchObservedRunningTime="2025-12-02 17:45:24.379811006 +0000 UTC m=+3754.906699755" Dec 02 17:45:29 crc kubenswrapper[4747]: I1202 17:45:29.560238 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:29 crc kubenswrapper[4747]: I1202 17:45:29.560830 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:29 crc kubenswrapper[4747]: I1202 17:45:29.618061 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:30 crc kubenswrapper[4747]: I1202 17:45:30.491946 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:30 crc kubenswrapper[4747]: I1202 17:45:30.572210 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zxzm5"] Dec 02 17:45:32 crc kubenswrapper[4747]: I1202 17:45:32.441037 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zxzm5" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="registry-server" containerID="cri-o://390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004" gracePeriod=2 Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.185159 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.349754 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-utilities\") pod \"2774cfc7-266d-4770-bc9d-7d22c87402a8\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.349969 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7jw4\" (UniqueName: \"kubernetes.io/projected/2774cfc7-266d-4770-bc9d-7d22c87402a8-kube-api-access-v7jw4\") pod \"2774cfc7-266d-4770-bc9d-7d22c87402a8\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.350169 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-catalog-content\") pod \"2774cfc7-266d-4770-bc9d-7d22c87402a8\" (UID: \"2774cfc7-266d-4770-bc9d-7d22c87402a8\") " Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.352121 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-utilities" (OuterVolumeSpecName: "utilities") pod "2774cfc7-266d-4770-bc9d-7d22c87402a8" (UID: "2774cfc7-266d-4770-bc9d-7d22c87402a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.378967 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2774cfc7-266d-4770-bc9d-7d22c87402a8-kube-api-access-v7jw4" (OuterVolumeSpecName: "kube-api-access-v7jw4") pod "2774cfc7-266d-4770-bc9d-7d22c87402a8" (UID: "2774cfc7-266d-4770-bc9d-7d22c87402a8"). InnerVolumeSpecName "kube-api-access-v7jw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.450430 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2774cfc7-266d-4770-bc9d-7d22c87402a8" (UID: "2774cfc7-266d-4770-bc9d-7d22c87402a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.450988 4747 generic.go:334] "Generic (PLEG): container finished" podID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerID="390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004" exitCode=0 Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.451050 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zxzm5" event={"ID":"2774cfc7-266d-4770-bc9d-7d22c87402a8","Type":"ContainerDied","Data":"390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004"} Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.451082 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zxzm5" event={"ID":"2774cfc7-266d-4770-bc9d-7d22c87402a8","Type":"ContainerDied","Data":"c8f85a675475c996687c9e438ba18134e4df4d7ce60055b3d0c132927a916718"} Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.451101 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zxzm5" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.451099 4747 scope.go:117] "RemoveContainer" containerID="390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.455671 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.455728 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2774cfc7-266d-4770-bc9d-7d22c87402a8-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.455743 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7jw4\" (UniqueName: \"kubernetes.io/projected/2774cfc7-266d-4770-bc9d-7d22c87402a8-kube-api-access-v7jw4\") on node \"crc\" DevicePath \"\"" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.485080 4747 scope.go:117] "RemoveContainer" containerID="f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.498555 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zxzm5"] Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.508231 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zxzm5"] Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.513169 4747 scope.go:117] "RemoveContainer" containerID="aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.560448 4747 scope.go:117] "RemoveContainer" containerID="390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004" Dec 02 17:45:33 crc kubenswrapper[4747]: E1202 17:45:33.561022 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004\": container with ID starting with 390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004 not found: ID does not exist" containerID="390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.561056 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004"} err="failed to get container status \"390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004\": rpc error: code = NotFound desc = could not find container \"390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004\": container with ID starting with 390a49ea6022e14e78e7c41ea113b240fe8c1c9a16d9445ed7d64623c1195004 not found: ID does not exist" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.561078 4747 scope.go:117] "RemoveContainer" containerID="f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5" Dec 02 17:45:33 crc kubenswrapper[4747]: E1202 17:45:33.561331 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5\": container with ID starting with f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5 not found: 
ID does not exist" containerID="f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.561361 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5"} err="failed to get container status \"f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5\": rpc error: code = NotFound desc = could not find container \"f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5\": container with ID starting with f3027d0c8b7b6fb567eeca561330a439a5a988152a01f2f3fb0f0b8162b231e5 not found: ID does not exist" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.561380 4747 scope.go:117] "RemoveContainer" containerID="aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8" Dec 02 17:45:33 crc kubenswrapper[4747]: E1202 17:45:33.561604 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8\": container with ID starting with aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8 not found: ID does not exist" containerID="aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.561631 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8"} err="failed to get container status \"aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8\": rpc error: code = NotFound desc = could not find container \"aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8\": container with ID starting with aa34c7c7d6c4483697f997325186ba84ae7762a7eb19d82026f45614fd0e47c8 not found: ID does not exist" Dec 02 17:45:33 crc kubenswrapper[4747]: I1202 17:45:33.774219 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" path="/var/lib/kubelet/pods/2774cfc7-266d-4770-bc9d-7d22c87402a8/volumes" Dec 02 17:45:34 crc kubenswrapper[4747]: I1202 17:45:34.915625 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8c69f76b-hxmzj_6b329ab1-0ac2-4758-b497-7650406ab087/barbican-api/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.084672 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8c69f76b-hxmzj_6b329ab1-0ac2-4758-b497-7650406ab087/barbican-api-log/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.145329 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c69975b48-9pn7p_cf9be744-ba2f-43f6-bb0c-ab806681aeb2/barbican-keystone-listener/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.196965 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c69975b48-9pn7p_cf9be744-ba2f-43f6-bb0c-ab806681aeb2/barbican-keystone-listener-log/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.283110 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-57f999f495-w8tt5_65391885-7b23-49fc-b435-ea21781c2f4d/barbican-worker/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.321040 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-57f999f495-w8tt5_65391885-7b23-49fc-b435-ea21781c2f4d/barbican-worker-log/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.462278 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd_0af08a10-c636-49d7-8b9f-ae2bdd2e6371/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.563056 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/ceilometer-central-agent/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.639384 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/ceilometer-notification-agent/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.683766 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/proxy-httpd/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.773517 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/sg-core/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.842928 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7ae9edb9-7743-4454-adcf-86c0c8587943/cinder-api/0.log" Dec 02 17:45:35 crc kubenswrapper[4747]: I1202 17:45:35.865197 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7ae9edb9-7743-4454-adcf-86c0c8587943/cinder-api-log/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.022278 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0524feb0-bce7-45c4-8cd9-38b439a4a2bb/cinder-scheduler/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.055796 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0524feb0-bce7-45c4-8cd9-38b439a4a2bb/probe/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.330548 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf_cec55b8b-af9a-473f-b92b-e3008a596073/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.409320 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2_7e920075-6847-4768-b952-8a76f23acddd/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.532148 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-5c9gt_b72c7491-0f7d-4d44-9e37-7d04f3046a38/init/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.694994 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-5c9gt_b72c7491-0f7d-4d44-9e37-7d04f3046a38/init/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.748831 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-5c9gt_b72c7491-0f7d-4d44-9e37-7d04f3046a38/dnsmasq-dns/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.771382 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-q57jb_df3869df-566d-4296-9b5d-555260ca14dd/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.949431 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ab63cfa7-2c4a-41cb-8e9e-707d84886893/glance-log/0.log" Dec 02 17:45:36 crc kubenswrapper[4747]: I1202 17:45:36.963409 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ab63cfa7-2c4a-41cb-8e9e-707d84886893/glance-httpd/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.121409 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f419079c-56d7-40cf-bfcb-7cf6a43c44ed/glance-httpd/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.144442 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f419079c-56d7-40cf-bfcb-7cf6a43c44ed/glance-log/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.328730 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7c85649748-scrqf_4ec3f1d7-119b-40fa-b0f4-3d2f353ee162/horizon/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.395189 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz_a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.613096 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7c85649748-scrqf_4ec3f1d7-119b-40fa-b0f4-3d2f353ee162/horizon-log/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.615236 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-65pqk_49daddcc-4224-45ac-b0ba-cb24ddd68dee/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.854951 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_f7425e95-34bc-464a-8d30-74ec67cd1760/kube-state-metrics/0.log" Dec 02 17:45:37 crc kubenswrapper[4747]: I1202 17:45:37.979971 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7d78c5c778-tg55f_687002d8-0be7-43ec-a1aa-61b724aec872/keystone-api/0.log" Dec 02 17:45:38 crc kubenswrapper[4747]: I1202 17:45:38.101598 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk_577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:38 crc kubenswrapper[4747]: I1202 17:45:38.421009 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7b56d86469-kh76t_8c47782d-dc20-451f-ac66-8555693b819f/neutron-api/0.log" Dec 02 17:45:38 crc kubenswrapper[4747]: I1202 17:45:38.474752 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7b56d86469-kh76t_8c47782d-dc20-451f-ac66-8555693b819f/neutron-httpd/0.log" Dec 02 17:45:38 crc kubenswrapper[4747]: I1202 17:45:38.523492 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc_61785f6a-0ba1-41a6-bff7-2558d21779af/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:39 crc 
kubenswrapper[4747]: I1202 17:45:39.025317 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a264e5ce-56ed-4ffa-aee8-9951a0cdd335/nova-api-log/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.095778 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_a3523196-7b69-4247-b9fa-6b83ed18926a/nova-cell0-conductor-conductor/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.213359 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a264e5ce-56ed-4ffa-aee8-9951a0cdd335/nova-api-api/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.262950 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e/nova-cell1-conductor-conductor/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.365051 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_49fa0781-199d-40a5-958f-591c534f25cc/nova-cell1-novncproxy-novncproxy/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.547489 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-dvmc6_6bf55284-16a0-45c0-8ce9-9e074f7d7e0a/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.625421 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4236f6d8-0d70-4768-8eb5-3847fafdede8/nova-metadata-log/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.945000 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_0d9349be-b597-431f-badc-50d7da952f70/nova-scheduler-scheduler/0.log" Dec 02 17:45:39 crc kubenswrapper[4747]: I1202 17:45:39.957325 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_de9ec28c-1521-4af6-8473-fe8bf1cabf5d/mysql-bootstrap/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.074179 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_de9ec28c-1521-4af6-8473-fe8bf1cabf5d/mysql-bootstrap/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.156416 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_de9ec28c-1521-4af6-8473-fe8bf1cabf5d/galera/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.274510 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c1e65482-6e30-4e82-8c20-5fd991675dba/mysql-bootstrap/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.482388 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c1e65482-6e30-4e82-8c20-5fd991675dba/mysql-bootstrap/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.568088 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c1e65482-6e30-4e82-8c20-5fd991675dba/galera/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.696237 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_af70033c-2a14-481a-a85d-3063c09611fd/openstackclient/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.735287 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4236f6d8-0d70-4768-8eb5-3847fafdede8/nova-metadata-metadata/0.log" Dec 02 17:45:40 crc 
kubenswrapper[4747]: I1202 17:45:40.795449 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lqt7r_ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020/ovn-controller/0.log" Dec 02 17:45:40 crc kubenswrapper[4747]: I1202 17:45:40.978227 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-47dr4_0e28eacf-a679-45a6-9ab1-065400244faf/openstack-network-exporter/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.050284 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovsdb-server-init/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.292972 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovsdb-server-init/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.296453 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovsdb-server/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.308383 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovs-vswitchd/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.506437 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1a525864-cf44-4b73-bc28-7bda185cd5f8/openstack-network-exporter/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.514810 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-rml7b_5a3d9657-8162-4edb-88bc-af303d558b2c/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.570442 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1a525864-cf44-4b73-bc28-7bda185cd5f8/ovn-northd/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.720233 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_eb9eddf0-4be4-48f8-bcfe-083310ec9333/openstack-network-exporter/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.795097 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_eb9eddf0-4be4-48f8-bcfe-083310ec9333/ovsdbserver-nb/0.log" Dec 02 17:45:41 crc kubenswrapper[4747]: I1202 17:45:41.933937 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5a8b10db-030f-4419-9fa8-c500ae646151/ovsdbserver-sb/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.053995 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5a8b10db-030f-4419-9fa8-c500ae646151/openstack-network-exporter/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.176255 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b8f479dd4-zx826_2efad855-47a7-443a-9b34-0f4137d526e0/placement-api/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.296661 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_11c3b905-4c74-439b-b032-a3234c0bf501/setup-container/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.330198 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b8f479dd4-zx826_2efad855-47a7-443a-9b34-0f4137d526e0/placement-log/0.log" 
Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.428273 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_11c3b905-4c74-439b-b032-a3234c0bf501/setup-container/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.471520 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_11c3b905-4c74-439b-b032-a3234c0bf501/rabbitmq/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.539977 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_29f33a45-0d0b-4654-879e-94098ab4b4c5/setup-container/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.778835 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_29f33a45-0d0b-4654-879e-94098ab4b4c5/setup-container/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.802656 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_29f33a45-0d0b-4654-879e-94098ab4b4c5/rabbitmq/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.817936 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv_b7b40fdd-ba3d-48fa-b374-279795ee9a44/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:42 crc kubenswrapper[4747]: I1202 17:45:42.983954 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92_0fb63be9-f6ff-45ad-a564-6e43493ea683/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:43 crc kubenswrapper[4747]: I1202 17:45:43.114633 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-5xr4m_a0a25e12-2008-4db8-9de5-9656b34976e0/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:43 crc kubenswrapper[4747]: I1202 17:45:43.279240 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-7sddt_4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:43 crc kubenswrapper[4747]: I1202 17:45:43.366033 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-zfq7s_ee78ab8c-165e-4f44-9b66-d74f7aa4d397/ssh-known-hosts-edpm-deployment/0.log" Dec 02 17:45:43 crc kubenswrapper[4747]: I1202 17:45:43.544189 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57db9b8c79-f9rx5_41bc2800-e0ab-41cf-87a8-4fe9981de2e3/proxy-httpd/0.log" Dec 02 17:45:43 crc kubenswrapper[4747]: I1202 17:45:43.552231 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57db9b8c79-f9rx5_41bc2800-e0ab-41cf-87a8-4fe9981de2e3/proxy-server/0.log" Dec 02 17:45:43 crc kubenswrapper[4747]: I1202 17:45:43.899959 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-mbqqh_f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b/swift-ring-rebalance/0.log" Dec 02 17:45:43 crc kubenswrapper[4747]: I1202 17:45:43.943301 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-auditor/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.085240 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-reaper/0.log" Dec 02 17:45:44 crc 
kubenswrapper[4747]: I1202 17:45:44.150379 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-replicator/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.181530 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-server/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.299304 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-replicator/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.300105 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-auditor/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.345857 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-server/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.407983 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-updater/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.487211 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-expirer/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.538736 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-auditor/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.584819 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-replicator/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.638572 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-server/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.678915 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-updater/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.745212 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/rsync/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.767081 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/swift-recon-cron/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.893368 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn_aaa35a26-230d-4226-a19b-776a48b1bf07/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:45:44 crc kubenswrapper[4747]: I1202 17:45:44.999679 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_d7c276fe-92e7-4429-b6f8-d9488337b369/tempest-tests-tempest-tests-runner/0.log" Dec 02 17:45:45 crc kubenswrapper[4747]: I1202 17:45:45.106561 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_d1805265-c562-4f13-85ad-330c827b78d6/test-operator-logs-container/0.log" Dec 02 17:45:45 crc 
kubenswrapper[4747]: I1202 17:45:45.302551 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-l7fct_ca7ca1bc-1500-4246-b79c-6566ec5f0281/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 02 17:45:51 crc kubenswrapper[4747]: I1202 17:45:51.789612 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_11fbad36-d913-4a80-b1db-6f9707f8c370/memcached/0.log"
Dec 02 17:46:01 crc kubenswrapper[4747]: I1202 17:46:01.795547 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:46:01 crc kubenswrapper[4747]: I1202 17:46:01.795970 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:46:10 crc kubenswrapper[4747]: I1202 17:46:10.654723 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/util/0.log"
Dec 02 17:46:10 crc kubenswrapper[4747]: I1202 17:46:10.757619 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/util/0.log"
Dec 02 17:46:10 crc kubenswrapper[4747]: I1202 17:46:10.795140 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/pull/0.log"
Dec 02 17:46:10 crc kubenswrapper[4747]: I1202 17:46:10.824856 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/pull/0.log"
Dec 02 17:46:10 crc kubenswrapper[4747]: I1202 17:46:10.986590 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/util/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.014046 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/extract/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.015993 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/pull/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.142316 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-5bfbbb859d-5b75z_02a1a6c9-a064-447d-85b3-61d6de6bba1a/kube-rbac-proxy/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.245191 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-5bfbbb859d-5b75z_02a1a6c9-a064-447d-85b3-61d6de6bba1a/manager/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.249770 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748967c98-zjpdf_84349f11-8427-4aa8-ae24-750a6fdc5e78/kube-rbac-proxy/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.383543 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748967c98-zjpdf_84349f11-8427-4aa8-ae24-750a6fdc5e78/manager/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.451080 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6788cc6d75-nsvrj_b1871043-c496-421e-8055-817652748d46/kube-rbac-proxy/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.494663 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6788cc6d75-nsvrj_b1871043-c496-421e-8055-817652748d46/manager/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.620824 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-85fbd69fcd-ns9p8_1a8a4d9e-ee5e-4235-bae4-23eb196dac78/kube-rbac-proxy/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.780176 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-85fbd69fcd-ns9p8_1a8a4d9e-ee5e-4235-bae4-23eb196dac78/manager/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.805350 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-698d6fd7d6-jhjgl_aac0c240-30e1-410d-bab3-b87965dbd297/kube-rbac-proxy/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.840204 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-698d6fd7d6-jhjgl_aac0c240-30e1-410d-bab3-b87965dbd297/manager/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.949229 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7d5d9fd47f-dhznh_dff222d7-fd30-4d3a-839a-6478da00ef65/kube-rbac-proxy/0.log"
Dec 02 17:46:11 crc kubenswrapper[4747]: I1202 17:46:11.995730 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7d5d9fd47f-dhznh_dff222d7-fd30-4d3a-839a-6478da00ef65/manager/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.098926 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6c55d8d69b-f7k5g_eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb/kube-rbac-proxy/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.187365 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-54485f899-qjnsp_c637ccff-ec15-453e-9d0b-1e9d013f5f60/kube-rbac-proxy/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.316851 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6c55d8d69b-f7k5g_eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb/manager/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.368895 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-54485f899-qjnsp_c637ccff-ec15-453e-9d0b-1e9d013f5f60/manager/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.442864 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-79cc9d59f5-ftf2c_3e91b7b4-9289-4769-83a7-4cd35038aaad/kube-rbac-proxy/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.550699 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-79cc9d59f5-ftf2c_3e91b7b4-9289-4769-83a7-4cd35038aaad/manager/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.738885 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5cbc8c7f96-f2qv7_3685e169-7bca-47ae-b5bc-5945db4fa054/kube-rbac-proxy/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.779288 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5cbc8c7f96-f2qv7_3685e169-7bca-47ae-b5bc-5945db4fa054/manager/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.875657 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-64d7c556cd-vkmkn_3f797f20-b787-4429-a862-badf66ed38ea/kube-rbac-proxy/0.log"
Dec 02 17:46:12 crc kubenswrapper[4747]: I1202 17:46:12.923288 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-64d7c556cd-vkmkn_3f797f20-b787-4429-a862-badf66ed38ea/manager/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.083492 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-58879495c-wg54q_f5edfca9-f892-409d-856c-70e757072464/manager/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.098817 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-58879495c-wg54q_f5edfca9-f892-409d-856c-70e757072464/kube-rbac-proxy/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.175552 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79d658b66d-tkgmm_11014c8f-0b84-470f-aaf8-0d029800d594/kube-rbac-proxy/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.264553 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-d5fb87cb8-8z6z7_93ff580d-18b2-4e1f-af0b-f2bd36b1e0db/kube-rbac-proxy/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.331731 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79d658b66d-tkgmm_11014c8f-0b84-470f-aaf8-0d029800d594/manager/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.366081 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-d5fb87cb8-8z6z7_93ff580d-18b2-4e1f-af0b-f2bd36b1e0db/manager/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.509411 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77868f484-n5tnz_50fe2539-78c5-4fde-9554-30143fdc520f/kube-rbac-proxy/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.518669 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77868f484-n5tnz_50fe2539-78c5-4fde-9554-30143fdc520f/manager/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.665926 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78568b558-x86lp_882ad3dc-b8df-4107-855a-bdf7dc10786b/kube-rbac-proxy/0.log"
Dec 02 17:46:13 crc kubenswrapper[4747]: I1202 17:46:13.800770 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5d4dbc7dd5-9bkrr_8983941a-65e0-4b84-9b02-38dc34133b0f/kube-rbac-proxy/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.033103 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-br7nx_17f243ad-ab56-4cd4-9c45-1c665320e93d/registry-server/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.038683 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5d4dbc7dd5-9bkrr_8983941a-65e0-4b84-9b02-38dc34133b0f/operator/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.260222 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5b67cfc8fb-j9r8v_d539442d-27c5-4383-9a11-589905951e21/kube-rbac-proxy/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.333817 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5b67cfc8fb-j9r8v_d539442d-27c5-4383-9a11-589905951e21/manager/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.492946 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-867d87977b-kpccv_c8dc257d-bb42-43c0-b784-483ccb97f95f/kube-rbac-proxy/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.586372 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg_75bdef4f-9dfa-4699-9cea-b2804869c8ef/operator/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.592864 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-867d87977b-kpccv_c8dc257d-bb42-43c0-b784-483ccb97f95f/manager/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.824676 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78568b558-x86lp_882ad3dc-b8df-4107-855a-bdf7dc10786b/manager/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.847859 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-8f6687c44-lkxqf_29996641-038c-4bb4-8ed8-4cc853ab4369/kube-rbac-proxy/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.858369 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-8f6687c44-lkxqf_29996641-038c-4bb4-8ed8-4cc853ab4369/manager/0.log"
Dec 02 17:46:14 crc kubenswrapper[4747]: I1202 17:46:14.904099 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-695797c565-mwsvx_6ea9539a-d252-4870-bdbf-4bc6d033840c/kube-rbac-proxy/0.log"
Dec 02 17:46:15 crc kubenswrapper[4747]: I1202 17:46:15.008260 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-bb86466d8-zzcts_e1603c18-0e89-40a6-bb71-549cd8db07c6/kube-rbac-proxy/0.log"
Dec 02 17:46:15 crc kubenswrapper[4747]: I1202 17:46:15.009583 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-695797c565-mwsvx_6ea9539a-d252-4870-bdbf-4bc6d033840c/manager/0.log"
Dec 02 17:46:15 crc kubenswrapper[4747]: I1202 17:46:15.048982 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-bb86466d8-zzcts_e1603c18-0e89-40a6-bb71-549cd8db07c6/manager/0.log"
Dec 02 17:46:15 crc kubenswrapper[4747]: I1202 17:46:15.155751 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b56b8849f-swpq5_1183ccf7-36cd-41ee-96d7-cb7272989af0/kube-rbac-proxy/0.log"
Dec 02 17:46:15 crc kubenswrapper[4747]: I1202 17:46:15.180950 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b56b8849f-swpq5_1183ccf7-36cd-41ee-96d7-cb7272989af0/manager/0.log"
Dec 02 17:46:31 crc kubenswrapper[4747]: I1202 17:46:31.580330 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-2lxh4_2f1dd59a-4931-4cc0-8105-f036e8e2f72f/control-plane-machine-set-operator/0.log"
Dec 02 17:46:31 crc kubenswrapper[4747]: I1202 17:46:31.771190 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dn6td_9330ef08-f76f-4166-a6c0-a3275375c9f1/kube-rbac-proxy/0.log"
Dec 02 17:46:31 crc kubenswrapper[4747]: I1202 17:46:31.779389 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dn6td_9330ef08-f76f-4166-a6c0-a3275375c9f1/machine-api-operator/0.log"
Dec 02 17:46:31 crc kubenswrapper[4747]: I1202 17:46:31.795204 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:46:31 crc kubenswrapper[4747]: I1202 17:46:31.795259 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:46:45 crc kubenswrapper[4747]: I1202 17:46:45.114490 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-b9r5p_1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9/cert-manager-controller/0.log"
Dec 02 17:46:45 crc kubenswrapper[4747]: I1202 17:46:45.258565 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-vr84r_2c58e5fb-8d6a-4102-9191-ca69fb40e3bd/cert-manager-cainjector/0.log"
Dec 02 17:46:45 crc kubenswrapper[4747]: I1202 17:46:45.283451 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-qb975_9ee35e2a-2967-4c76-92ec-2a7b5db87ba7/cert-manager-webhook/0.log"
Dec 02 17:46:59 crc kubenswrapper[4747]: I1202 17:46:59.196346 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-c7bcn_2b678030-cb69-4a68-ab2f-140d36283f19/nmstate-console-plugin/0.log"
Dec 02 17:46:59 crc kubenswrapper[4747]: I1202 17:46:59.258535 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-pj45b_00a29ad2-51bd-409c-afc3-ccd42b113c68/nmstate-handler/0.log"
Dec 02 17:46:59 crc kubenswrapper[4747]: I1202 17:46:59.368773 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-f556t_cb597834-dde7-4bcb-b267-22aae223b524/kube-rbac-proxy/0.log"
Dec 02 17:46:59 crc kubenswrapper[4747]: I1202 17:46:59.373433 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-f556t_cb597834-dde7-4bcb-b267-22aae223b524/nmstate-metrics/0.log"
Dec 02 17:46:59 crc kubenswrapper[4747]: I1202 17:46:59.520676 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-8sr7z_b1fa5588-eede-4bb2-8b51-98f2582557bd/nmstate-operator/0.log"
Dec 02 17:46:59 crc kubenswrapper[4747]: I1202 17:46:59.585533 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-lxrq8_6659cefd-449d-4eb0-a02e-d118b586ebac/nmstate-webhook/0.log"
Dec 02 17:47:01 crc kubenswrapper[4747]: I1202 17:47:01.794892 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:47:01 crc kubenswrapper[4747]: I1202 17:47:01.795155 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:47:01 crc kubenswrapper[4747]: I1202 17:47:01.795198 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc"
Dec 02 17:47:01 crc kubenswrapper[4747]: I1202 17:47:01.795808 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 17:47:01 crc kubenswrapper[4747]: I1202 17:47:01.795852 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511" gracePeriod=600
Dec 02 17:47:01 crc kubenswrapper[4747]: E1202 17:47:01.914661 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:47:02 crc kubenswrapper[4747]: I1202 17:47:02.279450 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511" exitCode=0
Dec 02 17:47:02 crc kubenswrapper[4747]: I1202 17:47:02.279509 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"}
Dec 02 17:47:02 crc kubenswrapper[4747]: I1202 17:47:02.279546 4747 scope.go:117] "RemoveContainer" containerID="4088f7e38f3f9699e8568820b04c28ef358afd21592a256df06f5bb3fcc89d9d"
Dec 02 17:47:02 crc kubenswrapper[4747]: I1202 17:47:02.281058 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:47:02 crc kubenswrapper[4747]: E1202 17:47:02.282137 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:47:13 crc kubenswrapper[4747]: I1202 17:47:13.761874 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:47:13 crc kubenswrapper[4747]: E1202 17:47:13.762833 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.314443 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-hkpjf_3dd4af5f-feb2-49f2-ade6-35794a19f8b8/kube-rbac-proxy/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.409866 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-hkpjf_3dd4af5f-feb2-49f2-ade6-35794a19f8b8/controller/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.482700 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.722879 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.740554 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.771644 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.818750 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.976203 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log"
Dec 02 17:47:16 crc kubenswrapper[4747]: I1202 17:47:16.976831 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.010685 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.015024 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.179869 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.223479 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.227605 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/controller/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.249796 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.393988 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/kube-rbac-proxy/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.480648 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/kube-rbac-proxy-frr/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.489331 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/frr-metrics/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.651819 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/reloader/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.706195 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-llx6b_35b75b4b-4452-4e1b-8571-fbafb78b130b/frr-k8s-webhook-server/0.log"
Dec 02 17:47:17 crc kubenswrapper[4747]: I1202 17:47:17.912323 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-98db7dd6f-hftd4_5e16dbd4-9db6-47d1-bfc3-549dbb8067e9/manager/0.log"
Dec 02 17:47:18 crc kubenswrapper[4747]: I1202 17:47:18.076014 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5448bdf48d-p2qrm_c15d13db-952d-4732-9d31-9ba5e926796c/webhook-server/0.log"
Dec 02 17:47:18 crc kubenswrapper[4747]: I1202 17:47:18.209549 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7vlkv_b7db8655-8adb-4345-b69a-574f2fbbffcb/kube-rbac-proxy/0.log"
Dec 02 17:47:18 crc kubenswrapper[4747]: I1202 17:47:18.667417 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7vlkv_b7db8655-8adb-4345-b69a-574f2fbbffcb/speaker/0.log"
Dec 02 17:47:18 crc kubenswrapper[4747]: I1202 17:47:18.670760 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/frr/0.log"
Dec 02 17:47:28 crc kubenswrapper[4747]: I1202 17:47:28.761290 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:47:28 crc kubenswrapper[4747]: E1202 17:47:28.762238 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.261407 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/util/0.log"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.441458 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/pull/0.log"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.462148 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/util/0.log"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.533814 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/pull/0.log"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.650927 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/util/0.log"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.659086 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/extract/0.log"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.686669 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/pull/0.log"
Dec 02 17:47:32 crc kubenswrapper[4747]: I1202 17:47:32.818520 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/util/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.026256 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/pull/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.056219 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/pull/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.058152 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/util/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.211161 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/util/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.241540 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/pull/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.252888 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/extract/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.394127 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-utilities/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.596630 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-content/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.598809 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-content/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.656854 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-utilities/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.772398 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-utilities/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.836700 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-content/0.log"
Dec 02 17:47:33 crc kubenswrapper[4747]: I1202 17:47:33.966805 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-utilities/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.227513 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-content/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.261453 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-content/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.262518 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-utilities/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.297692 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/registry-server/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.445439 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-content/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.451698 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-utilities/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.794100 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-utilities/0.log"
Dec 02 17:47:34 crc kubenswrapper[4747]: I1202 17:47:34.852180 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-2kmf2_dd32d5f2-ea95-4e1f-91f5-3e245c961bd6/marketplace-operator/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.073010 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/registry-server/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.103838 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-content/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.111237 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-utilities/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.132936 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-content/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.292568 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-utilities/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.333195 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-content/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.414583 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/registry-server/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.499219 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-utilities/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.687257 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-utilities/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.706656 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-content/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.751803 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-content/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.854072 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-utilities/0.log"
Dec 02 17:47:35 crc kubenswrapper[4747]: I1202 17:47:35.906676 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-content/0.log"
Dec 02 17:47:36 crc kubenswrapper[4747]: I1202 17:47:36.256166 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/registry-server/0.log"
Dec 02 17:47:40 crc kubenswrapper[4747]: I1202 17:47:40.762853 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:47:40 crc kubenswrapper[4747]: E1202 17:47:40.764019 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:47:55 crc kubenswrapper[4747]: I1202 17:47:55.760361 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:47:55 crc kubenswrapper[4747]: E1202 17:47:55.761027 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:48:04 crc kubenswrapper[4747]: E1202 17:48:04.057456 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]"
Dec 02 17:48:07 crc kubenswrapper[4747]: I1202 17:48:07.761469 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:48:07 crc kubenswrapper[4747]: E1202 17:48:07.762352 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:48:18 crc kubenswrapper[4747]: I1202 17:48:18.761844 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:48:18 crc kubenswrapper[4747]: E1202 17:48:18.762777 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:48:29 crc kubenswrapper[4747]: I1202 17:48:29.770358 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:48:29 crc kubenswrapper[4747]: E1202 17:48:29.771328 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:48:42 crc kubenswrapper[4747]: I1202 17:48:42.766352 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:48:42 crc kubenswrapper[4747]: E1202 17:48:42.767321 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:48:55 crc kubenswrapper[4747]: I1202 17:48:55.761885 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:48:55 crc kubenswrapper[4747]: E1202 17:48:55.763015 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:49:08 crc kubenswrapper[4747]: I1202 17:49:08.763598 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:49:08 crc kubenswrapper[4747]: E1202 17:49:08.764539 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:49:15 crc kubenswrapper[4747]: I1202 17:49:15.708232 4747 generic.go:334] "Generic (PLEG): container finished" podID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerID="566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1" exitCode=0
Dec 02 17:49:15 crc kubenswrapper[4747]: I1202 17:49:15.708283 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dhk6s/must-gather-zvw8c" event={"ID":"a5a3b242-68b3-4f35-ad72-a9d91e543720","Type":"ContainerDied","Data":"566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1"}
Dec 02 17:49:15 crc kubenswrapper[4747]: I1202 17:49:15.709663 4747 scope.go:117] "RemoveContainer" containerID="566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1"
Dec 02 17:49:16 crc kubenswrapper[4747]: I1202 17:49:16.602098 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dhk6s_must-gather-zvw8c_a5a3b242-68b3-4f35-ad72-a9d91e543720/gather/0.log"
Dec 02 17:49:23 crc kubenswrapper[4747]: I1202 17:49:23.760945 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:49:23 crc kubenswrapper[4747]: E1202 17:49:23.762058 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:49:23 crc kubenswrapper[4747]: I1202 17:49:23.839057 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dhk6s/must-gather-zvw8c"]
Dec 02 17:49:23 crc kubenswrapper[4747]: I1202 17:49:23.839353 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-dhk6s/must-gather-zvw8c" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerName="copy" containerID="cri-o://d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4" gracePeriod=2
Dec 02 17:49:23 crc kubenswrapper[4747]: I1202 17:49:23.853760 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dhk6s/must-gather-zvw8c"]
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.408898 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dhk6s_must-gather-zvw8c_a5a3b242-68b3-4f35-ad72-a9d91e543720/copy/0.log"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.409578 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.537773 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a5a3b242-68b3-4f35-ad72-a9d91e543720-must-gather-output\") pod \"a5a3b242-68b3-4f35-ad72-a9d91e543720\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") "
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.538003 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qw47\" (UniqueName: \"kubernetes.io/projected/a5a3b242-68b3-4f35-ad72-a9d91e543720-kube-api-access-9qw47\") pod \"a5a3b242-68b3-4f35-ad72-a9d91e543720\" (UID: \"a5a3b242-68b3-4f35-ad72-a9d91e543720\") "
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.548245 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5a3b242-68b3-4f35-ad72-a9d91e543720-kube-api-access-9qw47" (OuterVolumeSpecName: "kube-api-access-9qw47") pod "a5a3b242-68b3-4f35-ad72-a9d91e543720" (UID: "a5a3b242-68b3-4f35-ad72-a9d91e543720"). InnerVolumeSpecName "kube-api-access-9qw47". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.640521 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qw47\" (UniqueName: \"kubernetes.io/projected/a5a3b242-68b3-4f35-ad72-a9d91e543720-kube-api-access-9qw47\") on node \"crc\" DevicePath \"\""
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.690157 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5a3b242-68b3-4f35-ad72-a9d91e543720-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a5a3b242-68b3-4f35-ad72-a9d91e543720" (UID: "a5a3b242-68b3-4f35-ad72-a9d91e543720"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.741443 4747 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a5a3b242-68b3-4f35-ad72-a9d91e543720-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.834977 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dhk6s_must-gather-zvw8c_a5a3b242-68b3-4f35-ad72-a9d91e543720/copy/0.log"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.835561 4747 generic.go:334] "Generic (PLEG): container finished" podID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerID="d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4" exitCode=143
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.835655 4747 scope.go:117] "RemoveContainer" containerID="d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.835794 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dhk6s/must-gather-zvw8c"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.876714 4747 scope.go:117] "RemoveContainer" containerID="566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.938185 4747 scope.go:117] "RemoveContainer" containerID="d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4"
Dec 02 17:49:24 crc kubenswrapper[4747]: E1202 17:49:24.938709 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4\": container with ID starting with d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4 not found: ID does not exist" containerID="d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.938764 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4"} err="failed to get container status \"d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4\": rpc error: code = NotFound desc = could not find container \"d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4\": container with ID starting with d0228949001bce849f00ec3ee506134aa1a38a6fc34f1ea1be96f8557fe2ece4 not found: ID does not exist"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.938797 4747 scope.go:117] "RemoveContainer" containerID="566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1"
Dec 02 17:49:24 crc kubenswrapper[4747]: E1202 17:49:24.939648 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1\": container with ID starting with 566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1 not found: ID does not exist" containerID="566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1"
Dec 02 17:49:24 crc kubenswrapper[4747]: I1202 17:49:24.939680 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1"} err="failed to get container status \"566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1\": rpc error: code = NotFound desc = could not find container \"566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1\": container with ID starting with 566c0d66c42135440655598addeb90485cea2a8ad6aaaf579ad1f6290e3dcae1 not found: ID does not exist"
Dec 02 17:49:25 crc kubenswrapper[4747]: I1202 17:49:25.771084 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" path="/var/lib/kubelet/pods/a5a3b242-68b3-4f35-ad72-a9d91e543720/volumes"
Dec 02 17:49:38 crc kubenswrapper[4747]: I1202 17:49:38.762397 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:49:38 crc kubenswrapper[4747]: E1202 17:49:38.763515 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:49:49 crc kubenswrapper[4747]: I1202 17:49:49.783733 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:49:49 crc kubenswrapper[4747]: E1202 17:49:49.784749 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:50:03 crc kubenswrapper[4747]: I1202 17:50:03.760777 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:50:03 crc kubenswrapper[4747]: E1202 17:50:03.762148 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:50:18 crc kubenswrapper[4747]: I1202 17:50:18.761415 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:50:18 crc kubenswrapper[4747]: E1202 17:50:18.762470 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:50:29 crc kubenswrapper[4747]: I1202 17:50:29.774416 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:50:29 crc kubenswrapper[4747]: E1202 17:50:29.775563 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:50:40 crc kubenswrapper[4747]: I1202 17:50:40.761007 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:50:40 crc kubenswrapper[4747]: E1202 17:50:40.762459 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:50:51 crc kubenswrapper[4747]: I1202 17:50:51.761172 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:50:51 crc kubenswrapper[4747]: E1202 17:50:51.762179 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:51:03 crc kubenswrapper[4747]: I1202 17:51:03.760981 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:51:03 crc kubenswrapper[4747]: E1202 17:51:03.762330 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:51:14 crc kubenswrapper[4747]: I1202 17:51:14.761003 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:51:14 crc kubenswrapper[4747]: E1202 17:51:14.764120 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:51:28 crc kubenswrapper[4747]: I1202 17:51:28.761447 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:51:28 crc kubenswrapper[4747]: E1202 17:51:28.762890 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:51:40 crc kubenswrapper[4747]: I1202 17:51:40.761440 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:51:40 crc kubenswrapper[4747]: E1202 17:51:40.762783 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:51:55 crc kubenswrapper[4747]: I1202 17:51:55.761838 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:51:55 crc kubenswrapper[4747]: E1202 17:51:55.762659 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:52:08 crc kubenswrapper[4747]: I1202 17:52:08.761048 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511"
Dec 02 17:52:09 crc kubenswrapper[4747]: I1202 17:52:09.891928 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"1d8f0c8a99109d44b7610ef8a9fe27391714d9f2d4fa74ebb42d42c2897c3ea2"}
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.013767 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8rdk7/must-gather-8vrmn"]
Dec 02 17:52:25 crc kubenswrapper[4747]: E1202 17:52:25.014802 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="extract-utilities"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.014819 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="extract-utilities"
Dec 02 17:52:25 crc kubenswrapper[4747]: E1202 17:52:25.014840 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerName="copy"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.014848 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerName="copy"
Dec 02 17:52:25 crc kubenswrapper[4747]: E1202 17:52:25.014870 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="extract-content"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.014881 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="extract-content"
Dec 02 17:52:25 crc kubenswrapper[4747]: E1202 17:52:25.014929 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerName="gather"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.014939 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerName="gather"
Dec 02 17:52:25 crc kubenswrapper[4747]: E1202 17:52:25.014954 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="registry-server"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.014961 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="registry-server"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.015213 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerName="copy"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.015246 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2774cfc7-266d-4770-bc9d-7d22c87402a8" containerName="registry-server"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.015267 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5a3b242-68b3-4f35-ad72-a9d91e543720" containerName="gather"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.016608 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.027844 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8rdk7"/"openshift-service-ca.crt"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.028174 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-8rdk7"/"kube-root-ca.crt"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.036195 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8rdk7/must-gather-8vrmn"]
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.210896 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-must-gather-output\") pod \"must-gather-8vrmn\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") " pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.211257 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b9f2\" (UniqueName: \"kubernetes.io/projected/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-kube-api-access-2b9f2\") pod \"must-gather-8vrmn\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") " pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.312351 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-must-gather-output\") pod \"must-gather-8vrmn\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") " pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.312436 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b9f2\" (UniqueName: \"kubernetes.io/projected/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-kube-api-access-2b9f2\") pod \"must-gather-8vrmn\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") " pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.312830 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-must-gather-output\") pod \"must-gather-8vrmn\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") " pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.361044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b9f2\" (UniqueName: \"kubernetes.io/projected/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-kube-api-access-2b9f2\") pod \"must-gather-8vrmn\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") " pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:25 crc kubenswrapper[4747]: I1202 17:52:25.642736 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:52:26 crc kubenswrapper[4747]: I1202 17:52:26.187835 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-8rdk7/must-gather-8vrmn"]
Dec 02 17:52:27 crc kubenswrapper[4747]: I1202 17:52:27.074088 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/must-gather-8vrmn" event={"ID":"32dfc24d-44c9-4a76-aecf-76e86bc57bdd","Type":"ContainerStarted","Data":"afdef2bd739c4f494dd07d3afcc124149a6542b21f14af17f82379d52cfb55c6"}
Dec 02 17:52:27 crc kubenswrapper[4747]: I1202 17:52:27.074486 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/must-gather-8vrmn" event={"ID":"32dfc24d-44c9-4a76-aecf-76e86bc57bdd","Type":"ContainerStarted","Data":"619d50729b6eb2a933a45f4ff1f0eb15025c6019073455c702e8c4fcef5f7c39"}
Dec 02 17:52:27 crc kubenswrapper[4747]: I1202 17:52:27.074499 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/must-gather-8vrmn" event={"ID":"32dfc24d-44c9-4a76-aecf-76e86bc57bdd","Type":"ContainerStarted","Data":"f0c22cf25b84b44e1f32cd25ddbe5ef7cb5a9922f9ec394a5070a6ccac6fe5d6"}
Dec 02 17:52:27 crc kubenswrapper[4747]: I1202 17:52:27.116274 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8rdk7/must-gather-8vrmn" podStartSLOduration=3.116250167 podStartE2EDuration="3.116250167s" podCreationTimestamp="2025-12-02 17:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:52:27.101103186 +0000 UTC m=+4177.627991995" watchObservedRunningTime="2025-12-02 17:52:27.116250167 +0000 UTC m=+4177.643138916"
Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.362601 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-5mfkl"]
Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.364318 4747 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.366625 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8rdk7"/"default-dockercfg-nxz82" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.430868 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-host\") pod \"crc-debug-5mfkl\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.431124 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnkwh\" (UniqueName: \"kubernetes.io/projected/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-kube-api-access-dnkwh\") pod \"crc-debug-5mfkl\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.532563 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnkwh\" (UniqueName: \"kubernetes.io/projected/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-kube-api-access-dnkwh\") pod \"crc-debug-5mfkl\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.532653 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-host\") pod \"crc-debug-5mfkl\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.532933 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-host\") pod \"crc-debug-5mfkl\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.551886 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnkwh\" (UniqueName: \"kubernetes.io/projected/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-kube-api-access-dnkwh\") pod \"crc-debug-5mfkl\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: I1202 17:52:30.683452 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:52:30 crc kubenswrapper[4747]: W1202 17:52:30.712662 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0f43d9b_55d3_4faf_bc3f_80ec2dfe68a7.slice/crio-b85189da8fd0bb1386a3de6fe0b7478ab038a03530efe56a3ef0165f613241c1 WatchSource:0}: Error finding container b85189da8fd0bb1386a3de6fe0b7478ab038a03530efe56a3ef0165f613241c1: Status 404 returned error can't find the container with id b85189da8fd0bb1386a3de6fe0b7478ab038a03530efe56a3ef0165f613241c1 Dec 02 17:52:31 crc kubenswrapper[4747]: I1202 17:52:31.112567 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" event={"ID":"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7","Type":"ContainerStarted","Data":"39ccc57efad0d501afe7c4b37e9fceafceaa376e2bf9be92fa12ee594051f733"} Dec 02 17:52:31 crc kubenswrapper[4747]: I1202 17:52:31.113122 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" event={"ID":"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7","Type":"ContainerStarted","Data":"b85189da8fd0bb1386a3de6fe0b7478ab038a03530efe56a3ef0165f613241c1"} Dec 02 17:52:31 crc kubenswrapper[4747]: I1202 17:52:31.143349 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" podStartSLOduration=1.14333507 podStartE2EDuration="1.14333507s" podCreationTimestamp="2025-12-02 17:52:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 17:52:31.133810248 +0000 UTC m=+4181.660699007" watchObservedRunningTime="2025-12-02 17:52:31.14333507 +0000 UTC m=+4181.670223819" Dec 02 17:53:02 crc kubenswrapper[4747]: I1202 17:53:02.416963 4747 generic.go:334] "Generic (PLEG): container finished" podID="d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7" containerID="39ccc57efad0d501afe7c4b37e9fceafceaa376e2bf9be92fa12ee594051f733" exitCode=0 Dec 02 17:53:02 crc kubenswrapper[4747]: I1202 17:53:02.417343 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" event={"ID":"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7","Type":"ContainerDied","Data":"39ccc57efad0d501afe7c4b37e9fceafceaa376e2bf9be92fa12ee594051f733"} Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.533234 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.564699 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-5mfkl"] Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.576643 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-5mfkl"] Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.652012 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnkwh\" (UniqueName: \"kubernetes.io/projected/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-kube-api-access-dnkwh\") pod \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.652096 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-host\") pod \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\" (UID: \"d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7\") " Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.652692 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-host" (OuterVolumeSpecName: "host") pod "d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7" (UID: "d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.656844 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-kube-api-access-dnkwh" (OuterVolumeSpecName: "kube-api-access-dnkwh") pod "d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7" (UID: "d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7"). InnerVolumeSpecName "kube-api-access-dnkwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.754421 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnkwh\" (UniqueName: \"kubernetes.io/projected/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-kube-api-access-dnkwh\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.754460 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7-host\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:03 crc kubenswrapper[4747]: I1202 17:53:03.775195 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7" path="/var/lib/kubelet/pods/d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7/volumes" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.438881 4747 scope.go:117] "RemoveContainer" containerID="39ccc57efad0d501afe7c4b37e9fceafceaa376e2bf9be92fa12ee594051f733" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.438925 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5mfkl" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.785468 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-5k9n4"] Dec 02 17:53:04 crc kubenswrapper[4747]: E1202 17:53:04.785886 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7" containerName="container-00" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.785901 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7" containerName="container-00" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.786127 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f43d9b-55d3-4faf-bc3f-80ec2dfe68a7" containerName="container-00" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.786745 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.789214 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-8rdk7"/"default-dockercfg-nxz82" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.875956 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6stkk\" (UniqueName: \"kubernetes.io/projected/6bf5b6d3-e193-42ce-b384-edf7bbe49848-kube-api-access-6stkk\") pod \"crc-debug-5k9n4\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.876072 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bf5b6d3-e193-42ce-b384-edf7bbe49848-host\") pod \"crc-debug-5k9n4\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.977739 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bf5b6d3-e193-42ce-b384-edf7bbe49848-host\") pod \"crc-debug-5k9n4\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.977920 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bf5b6d3-e193-42ce-b384-edf7bbe49848-host\") pod \"crc-debug-5k9n4\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.978124 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6stkk\" (UniqueName: \"kubernetes.io/projected/6bf5b6d3-e193-42ce-b384-edf7bbe49848-kube-api-access-6stkk\") pod \"crc-debug-5k9n4\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:04 crc kubenswrapper[4747]: I1202 17:53:04.996099 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6stkk\" (UniqueName: \"kubernetes.io/projected/6bf5b6d3-e193-42ce-b384-edf7bbe49848-kube-api-access-6stkk\") pod \"crc-debug-5k9n4\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:05 crc kubenswrapper[4747]: I1202 
17:53:05.101881 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:05 crc kubenswrapper[4747]: W1202 17:53:05.151764 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bf5b6d3_e193_42ce_b384_edf7bbe49848.slice/crio-c9bf7c4dd87a1a727f32633e91a46f58964b8bcb252067905c6b8878418bbadb WatchSource:0}: Error finding container c9bf7c4dd87a1a727f32633e91a46f58964b8bcb252067905c6b8878418bbadb: Status 404 returned error can't find the container with id c9bf7c4dd87a1a727f32633e91a46f58964b8bcb252067905c6b8878418bbadb Dec 02 17:53:05 crc kubenswrapper[4747]: I1202 17:53:05.456062 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" event={"ID":"6bf5b6d3-e193-42ce-b384-edf7bbe49848","Type":"ContainerStarted","Data":"c9bf7c4dd87a1a727f32633e91a46f58964b8bcb252067905c6b8878418bbadb"} Dec 02 17:53:06 crc kubenswrapper[4747]: I1202 17:53:06.469449 4747 generic.go:334] "Generic (PLEG): container finished" podID="6bf5b6d3-e193-42ce-b384-edf7bbe49848" containerID="387cb270cb606ee4219397044f1787e5367e63722ffc2ccc83c7eae5ef68f7cb" exitCode=0 Dec 02 17:53:06 crc kubenswrapper[4747]: I1202 17:53:06.469487 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" event={"ID":"6bf5b6d3-e193-42ce-b384-edf7bbe49848","Type":"ContainerDied","Data":"387cb270cb606ee4219397044f1787e5367e63722ffc2ccc83c7eae5ef68f7cb"} Dec 02 17:53:06 crc kubenswrapper[4747]: I1202 17:53:06.906798 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-5k9n4"] Dec 02 17:53:06 crc kubenswrapper[4747]: I1202 17:53:06.917359 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-5k9n4"] Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.143941 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.154326 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-2tk97"] Dec 02 17:53:08 crc kubenswrapper[4747]: E1202 17:53:08.154745 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf5b6d3-e193-42ce-b384-edf7bbe49848" containerName="container-00" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.154767 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf5b6d3-e193-42ce-b384-edf7bbe49848" containerName="container-00" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.155062 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf5b6d3-e193-42ce-b384-edf7bbe49848" containerName="container-00" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.155686 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.244119 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6stkk\" (UniqueName: \"kubernetes.io/projected/6bf5b6d3-e193-42ce-b384-edf7bbe49848-kube-api-access-6stkk\") pod \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.244573 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bf5b6d3-e193-42ce-b384-edf7bbe49848-host\") pod \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\" (UID: \"6bf5b6d3-e193-42ce-b384-edf7bbe49848\") " Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.244632 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf5b6d3-e193-42ce-b384-edf7bbe49848-host" (OuterVolumeSpecName: "host") pod "6bf5b6d3-e193-42ce-b384-edf7bbe49848" (UID: "6bf5b6d3-e193-42ce-b384-edf7bbe49848"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.245298 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6bf5b6d3-e193-42ce-b384-edf7bbe49848-host\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.250891 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf5b6d3-e193-42ce-b384-edf7bbe49848-kube-api-access-6stkk" (OuterVolumeSpecName: "kube-api-access-6stkk") pod "6bf5b6d3-e193-42ce-b384-edf7bbe49848" (UID: "6bf5b6d3-e193-42ce-b384-edf7bbe49848"). InnerVolumeSpecName "kube-api-access-6stkk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.347100 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-host\") pod \"crc-debug-2tk97\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.347195 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jxwq\" (UniqueName: \"kubernetes.io/projected/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-kube-api-access-9jxwq\") pod \"crc-debug-2tk97\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.347519 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6stkk\" (UniqueName: \"kubernetes.io/projected/6bf5b6d3-e193-42ce-b384-edf7bbe49848-kube-api-access-6stkk\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.449358 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-host\") pod \"crc-debug-2tk97\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.449477 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-host\") pod \"crc-debug-2tk97\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.449499 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jxwq\" (UniqueName: \"kubernetes.io/projected/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-kube-api-access-9jxwq\") pod \"crc-debug-2tk97\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.477265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jxwq\" (UniqueName: \"kubernetes.io/projected/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-kube-api-access-9jxwq\") pod \"crc-debug-2tk97\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.491225 4747 scope.go:117] "RemoveContainer" containerID="387cb270cb606ee4219397044f1787e5367e63722ffc2ccc83c7eae5ef68f7cb" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.491395 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-5k9n4" Dec 02 17:53:08 crc kubenswrapper[4747]: I1202 17:53:08.771028 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:09 crc kubenswrapper[4747]: I1202 17:53:09.503846 4747 generic.go:334] "Generic (PLEG): container finished" podID="ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6" containerID="6cf4567a0324626ff6ab09d94efc04b79c2fb36f5f941d819ab6280034478f6c" exitCode=0 Dec 02 17:53:09 crc kubenswrapper[4747]: I1202 17:53:09.503888 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/crc-debug-2tk97" event={"ID":"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6","Type":"ContainerDied","Data":"6cf4567a0324626ff6ab09d94efc04b79c2fb36f5f941d819ab6280034478f6c"} Dec 02 17:53:09 crc kubenswrapper[4747]: I1202 17:53:09.504262 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/crc-debug-2tk97" event={"ID":"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6","Type":"ContainerStarted","Data":"08a85ef1063e7b4352f1bec0e039480ff9d21c73bb359079119b248effdf4225"} Dec 02 17:53:09 crc kubenswrapper[4747]: I1202 17:53:09.548675 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-2tk97"] Dec 02 17:53:09 crc kubenswrapper[4747]: I1202 17:53:09.557284 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8rdk7/crc-debug-2tk97"] Dec 02 17:53:09 crc kubenswrapper[4747]: I1202 17:53:09.787536 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bf5b6d3-e193-42ce-b384-edf7bbe49848" path="/var/lib/kubelet/pods/6bf5b6d3-e193-42ce-b384-edf7bbe49848/volumes" Dec 02 17:53:10 crc kubenswrapper[4747]: I1202 17:53:10.647147 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:10 crc kubenswrapper[4747]: I1202 17:53:10.796818 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jxwq\" (UniqueName: \"kubernetes.io/projected/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-kube-api-access-9jxwq\") pod \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " Dec 02 17:53:10 crc kubenswrapper[4747]: I1202 17:53:10.797056 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-host\") pod \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\" (UID: \"ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6\") " Dec 02 17:53:10 crc kubenswrapper[4747]: I1202 17:53:10.798036 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-host" (OuterVolumeSpecName: "host") pod "ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6" (UID: "ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 17:53:10 crc kubenswrapper[4747]: I1202 17:53:10.806412 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-kube-api-access-9jxwq" (OuterVolumeSpecName: "kube-api-access-9jxwq") pod "ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6" (UID: "ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6"). InnerVolumeSpecName "kube-api-access-9jxwq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:53:10 crc kubenswrapper[4747]: I1202 17:53:10.900293 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jxwq\" (UniqueName: \"kubernetes.io/projected/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-kube-api-access-9jxwq\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:10 crc kubenswrapper[4747]: I1202 17:53:10.900343 4747 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6-host\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:11 crc kubenswrapper[4747]: I1202 17:53:11.524407 4747 scope.go:117] "RemoveContainer" containerID="6cf4567a0324626ff6ab09d94efc04b79c2fb36f5f941d819ab6280034478f6c" Dec 02 17:53:11 crc kubenswrapper[4747]: I1202 17:53:11.524475 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/crc-debug-2tk97" Dec 02 17:53:11 crc kubenswrapper[4747]: I1202 17:53:11.771898 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6" path="/var/lib/kubelet/pods/ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6/volumes" Dec 02 17:53:39 crc kubenswrapper[4747]: I1202 17:53:39.825973 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8c69f76b-hxmzj_6b329ab1-0ac2-4758-b497-7650406ab087/barbican-api-log/0.log" Dec 02 17:53:39 crc kubenswrapper[4747]: I1202 17:53:39.844028 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6c8c69f76b-hxmzj_6b329ab1-0ac2-4758-b497-7650406ab087/barbican-api/0.log" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.088542 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5kj9j"] Dec 02 17:53:40 crc kubenswrapper[4747]: E1202 17:53:40.090229 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6" containerName="container-00" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.090261 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6" containerName="container-00" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.090542 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec917c55-3fdb-4b5b-8b4b-0f3c68594ab6" containerName="container-00" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.094737 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.109173 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5kj9j"] Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.129155 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c69975b48-9pn7p_cf9be744-ba2f-43f6-bb0c-ab806681aeb2/barbican-keystone-listener/0.log" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.195989 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-utilities\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.196036 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq7cz\" (UniqueName: \"kubernetes.io/projected/655aff42-4a4b-493d-a928-04fe52e0feea-kube-api-access-mq7cz\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.196126 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-catalog-content\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.209330 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c69975b48-9pn7p_cf9be744-ba2f-43f6-bb0c-ab806681aeb2/barbican-keystone-listener-log/0.log" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.298626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-utilities\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.298695 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq7cz\" (UniqueName: \"kubernetes.io/projected/655aff42-4a4b-493d-a928-04fe52e0feea-kube-api-access-mq7cz\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.298722 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-catalog-content\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.299220 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-utilities\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " 
pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.299236 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-catalog-content\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.319816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq7cz\" (UniqueName: \"kubernetes.io/projected/655aff42-4a4b-493d-a928-04fe52e0feea-kube-api-access-mq7cz\") pod \"certified-operators-5kj9j\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.414885 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-57f999f495-w8tt5_65391885-7b23-49fc-b435-ea21781c2f4d/barbican-worker/0.log" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.444570 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.495915 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-57f999f495-w8tt5_65391885-7b23-49fc-b435-ea21781c2f4d/barbican-worker-log/0.log" Dec 02 17:53:40 crc kubenswrapper[4747]: I1202 17:53:40.531410 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-vj4fd_0af08a10-c636-49d7-8b9f-ae2bdd2e6371/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:40.935891 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/ceilometer-notification-agent/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.071979 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/ceilometer-central-agent/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.084142 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/proxy-httpd/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.475776 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5kj9j"] Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.564359 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3feb84af-eb5c-4165-ba8b-b0c55cd2c369/sg-core/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.573573 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7ae9edb9-7743-4454-adcf-86c0c8587943/cinder-api-log/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.589549 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7ae9edb9-7743-4454-adcf-86c0c8587943/cinder-api/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.757271 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0524feb0-bce7-45c4-8cd9-38b439a4a2bb/cinder-scheduler/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.821427 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-scheduler-0_0524feb0-bce7-45c4-8cd9-38b439a4a2bb/probe/0.log" Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.832774 4747 generic.go:334] "Generic (PLEG): container finished" podID="655aff42-4a4b-493d-a928-04fe52e0feea" containerID="b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888" exitCode=0 Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.832828 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kj9j" event={"ID":"655aff42-4a4b-493d-a928-04fe52e0feea","Type":"ContainerDied","Data":"b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888"} Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.832860 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kj9j" event={"ID":"655aff42-4a4b-493d-a928-04fe52e0feea","Type":"ContainerStarted","Data":"f1b346f30ff5212d460a7c0dc44ff6b8516a75382efb353d75be8b5463d97706"} Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.845661 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 17:53:41 crc kubenswrapper[4747]: I1202 17:53:41.958042 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-kg4kf_cec55b8b-af9a-473f-b92b-e3008a596073/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.055036 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-qpvn2_7e920075-6847-4768-b952-8a76f23acddd/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.181334 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-5c9gt_b72c7491-0f7d-4d44-9e37-7d04f3046a38/init/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.343011 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-5c9gt_b72c7491-0f7d-4d44-9e37-7d04f3046a38/init/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.437047 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-5c9gt_b72c7491-0f7d-4d44-9e37-7d04f3046a38/dnsmasq-dns/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.441456 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-q57jb_df3869df-566d-4296-9b5d-555260ca14dd/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.610525 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ab63cfa7-2c4a-41cb-8e9e-707d84886893/glance-httpd/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.656935 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ab63cfa7-2c4a-41cb-8e9e-707d84886893/glance-log/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.804114 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f419079c-56d7-40cf-bfcb-7cf6a43c44ed/glance-log/0.log" Dec 02 17:53:42 crc kubenswrapper[4747]: I1202 17:53:42.811368 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_f419079c-56d7-40cf-bfcb-7cf6a43c44ed/glance-httpd/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.016531 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7c85649748-scrqf_4ec3f1d7-119b-40fa-b0f4-3d2f353ee162/horizon/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.167583 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9bhxz_a7f1f139-20ca-4e5e-b7f1-9a9de34ee6e1/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.351330 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-65pqk_49daddcc-4224-45ac-b0ba-cb24ddd68dee/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.370660 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7c85649748-scrqf_4ec3f1d7-119b-40fa-b0f4-3d2f353ee162/horizon-log/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: E1202 17:53:43.456752 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod655aff42_4a4b_493d_a928_04fe52e0feea.slice/crio-conmon-66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod655aff42_4a4b_493d_a928_04fe52e0feea.slice/crio-66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c.scope\": RecentStats: unable to find data in memory cache]" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.523198 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7d78c5c778-tg55f_687002d8-0be7-43ec-a1aa-61b724aec872/keystone-api/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.717603 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_f7425e95-34bc-464a-8d30-74ec67cd1760/kube-state-metrics/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.735353 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-nzdtk_577bb9f7-053c-4da3-99f4-b2f8c2e5c3c8/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.850021 4747 generic.go:334] "Generic (PLEG): container finished" podID="655aff42-4a4b-493d-a928-04fe52e0feea" containerID="66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c" exitCode=0 Dec 02 17:53:43 crc kubenswrapper[4747]: I1202 17:53:43.850065 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kj9j" event={"ID":"655aff42-4a4b-493d-a928-04fe52e0feea","Type":"ContainerDied","Data":"66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c"} Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.126829 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7b56d86469-kh76t_8c47782d-dc20-451f-ac66-8555693b819f/neutron-api/0.log" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.151437 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7b56d86469-kh76t_8c47782d-dc20-451f-ac66-8555693b819f/neutron-httpd/0.log" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 
17:53:44.234737 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-wr8jc_61785f6a-0ba1-41a6-bff7-2558d21779af/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.251941 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p986s"] Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.253971 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.303348 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p986s"] Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.366191 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-utilities\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.366618 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2pld\" (UniqueName: \"kubernetes.io/projected/8c7522fe-816b-4fe5-9b3e-7c083555c73b-kube-api-access-n2pld\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.366710 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-catalog-content\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.469426 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-catalog-content\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.469502 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-utilities\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.469573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2pld\" (UniqueName: \"kubernetes.io/projected/8c7522fe-816b-4fe5-9b3e-7c083555c73b-kube-api-access-n2pld\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.470187 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-utilities\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " 
pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.470319 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-catalog-content\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.494043 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2pld\" (UniqueName: \"kubernetes.io/projected/8c7522fe-816b-4fe5-9b3e-7c083555c73b-kube-api-access-n2pld\") pod \"redhat-marketplace-p986s\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.591499 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.861282 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dkhsn"] Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.866624 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kj9j" event={"ID":"655aff42-4a4b-493d-a928-04fe52e0feea","Type":"ContainerStarted","Data":"60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2"} Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.866824 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.871589 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dkhsn"] Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.883292 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5kj9j" podStartSLOduration=2.470234769 podStartE2EDuration="4.883269583s" podCreationTimestamp="2025-12-02 17:53:40 +0000 UTC" firstStartedPulling="2025-12-02 17:53:41.845407173 +0000 UTC m=+4252.372295922" lastFinishedPulling="2025-12-02 17:53:44.258441987 +0000 UTC m=+4254.785330736" observedRunningTime="2025-12-02 17:53:44.878742064 +0000 UTC m=+4255.405630803" watchObservedRunningTime="2025-12-02 17:53:44.883269583 +0000 UTC m=+4255.410158332" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.983720 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-utilities\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.983799 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9tt7\" (UniqueName: \"kubernetes.io/projected/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-kube-api-access-v9tt7\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:44 crc kubenswrapper[4747]: I1202 17:53:44.983859 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-catalog-content\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.072666 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a264e5ce-56ed-4ffa-aee8-9951a0cdd335/nova-api-log/0.log" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.085919 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-utilities\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.086009 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9tt7\" (UniqueName: \"kubernetes.io/projected/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-kube-api-access-v9tt7\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.086068 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-catalog-content\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.086575 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-catalog-content\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.086800 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-utilities\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.109931 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9tt7\" (UniqueName: \"kubernetes.io/projected/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-kube-api-access-v9tt7\") pod \"redhat-operators-dkhsn\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.125461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p986s"] Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.225149 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.265355 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_a3523196-7b69-4247-b9fa-6b83ed18926a/nova-cell0-conductor-conductor/0.log" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.643552 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_3a349a37-9c2c-4397-8fb3-ba6cdf8c5e2e/nova-cell1-conductor-conductor/0.log" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.666403 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_a264e5ce-56ed-4ffa-aee8-9951a0cdd335/nova-api-api/0.log" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.725380 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dkhsn"] Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.778110 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_49fa0781-199d-40a5-958f-591c534f25cc/nova-cell1-novncproxy-novncproxy/0.log" Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.877928 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkhsn" event={"ID":"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd","Type":"ContainerStarted","Data":"e5dc5e24f7294e9f46fb67600e97aff9ee71c1bea3d1b15e171d3551ea9b47a9"} Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.884360 4747 generic.go:334] "Generic (PLEG): container finished" podID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerID="51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd" exitCode=0 Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.885565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p986s" event={"ID":"8c7522fe-816b-4fe5-9b3e-7c083555c73b","Type":"ContainerDied","Data":"51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd"} Dec 02 17:53:45 crc kubenswrapper[4747]: I1202 17:53:45.885603 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p986s" event={"ID":"8c7522fe-816b-4fe5-9b3e-7c083555c73b","Type":"ContainerStarted","Data":"d3b984be645e4b40967bee53282f9487d0b8ab594d2b4a668a0c8538e9f6a6e9"} Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.084513 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4236f6d8-0d70-4768-8eb5-3847fafdede8/nova-metadata-log/0.log" Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.144674 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-dvmc6_6bf55284-16a0-45c0-8ce9-9e074f7d7e0a/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.596441 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_de9ec28c-1521-4af6-8473-fe8bf1cabf5d/mysql-bootstrap/0.log" Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.651711 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_0d9349be-b597-431f-badc-50d7da952f70/nova-scheduler-scheduler/0.log" Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.846228 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_de9ec28c-1521-4af6-8473-fe8bf1cabf5d/mysql-bootstrap/0.log" Dec 02 17:53:46 crc 
kubenswrapper[4747]: I1202 17:53:46.910141 4747 generic.go:334] "Generic (PLEG): container finished" podID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerID="d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4" exitCode=0 Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.910208 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkhsn" event={"ID":"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd","Type":"ContainerDied","Data":"d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4"} Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.929195 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p986s" event={"ID":"8c7522fe-816b-4fe5-9b3e-7c083555c73b","Type":"ContainerStarted","Data":"a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa"} Dec 02 17:53:46 crc kubenswrapper[4747]: I1202 17:53:46.983444 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_de9ec28c-1521-4af6-8473-fe8bf1cabf5d/galera/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.215422 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c1e65482-6e30-4e82-8c20-5fd991675dba/mysql-bootstrap/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.454023 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c1e65482-6e30-4e82-8c20-5fd991675dba/galera/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.499199 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c1e65482-6e30-4e82-8c20-5fd991675dba/mysql-bootstrap/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.637552 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4236f6d8-0d70-4768-8eb5-3847fafdede8/nova-metadata-metadata/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.643448 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_af70033c-2a14-481a-a85d-3063c09611fd/openstackclient/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.710153 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lqt7r_ecb5b4a2-0d03-464e-87f0-7bbb1cc6f020/ovn-controller/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.892099 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-47dr4_0e28eacf-a679-45a6-9ab1-065400244faf/openstack-network-exporter/0.log" Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.943176 4747 generic.go:334] "Generic (PLEG): container finished" podID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerID="a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa" exitCode=0 Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.943220 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p986s" event={"ID":"8c7522fe-816b-4fe5-9b3e-7c083555c73b","Type":"ContainerDied","Data":"a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa"} Dec 02 17:53:47 crc kubenswrapper[4747]: I1202 17:53:47.999830 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovsdb-server-init/0.log" Dec 02 17:53:48 crc kubenswrapper[4747]: I1202 17:53:48.771755 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovs-vswitchd/0.log" Dec 02 17:53:48 crc kubenswrapper[4747]: I1202 17:53:48.778526 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovsdb-server-init/0.log" Dec 02 17:53:48 crc kubenswrapper[4747]: I1202 17:53:48.779471 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hbnrp_46dfbb1c-1b65-4f5d-9087-94743cb4c00e/ovsdb-server/0.log" Dec 02 17:53:48 crc kubenswrapper[4747]: I1202 17:53:48.955829 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-rml7b_5a3d9657-8162-4edb-88bc-af303d558b2c/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.024503 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1a525864-cf44-4b73-bc28-7bda185cd5f8/ovn-northd/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.074613 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1a525864-cf44-4b73-bc28-7bda185cd5f8/openstack-network-exporter/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.289691 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_eb9eddf0-4be4-48f8-bcfe-083310ec9333/openstack-network-exporter/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.383346 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_eb9eddf0-4be4-48f8-bcfe-083310ec9333/ovsdbserver-nb/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.478495 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5a8b10db-030f-4419-9fa8-c500ae646151/ovsdbserver-sb/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.498971 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5a8b10db-030f-4419-9fa8-c500ae646151/openstack-network-exporter/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.755388 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b8f479dd4-zx826_2efad855-47a7-443a-9b34-0f4137d526e0/placement-api/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.867376 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6b8f479dd4-zx826_2efad855-47a7-443a-9b34-0f4137d526e0/placement-log/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.927425 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_11c3b905-4c74-439b-b032-a3234c0bf501/setup-container/0.log" Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.967966 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkhsn" event={"ID":"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd","Type":"ContainerStarted","Data":"69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b"} Dec 02 17:53:49 crc kubenswrapper[4747]: I1202 17:53:49.972283 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p986s" event={"ID":"8c7522fe-816b-4fe5-9b3e-7c083555c73b","Type":"ContainerStarted","Data":"041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7"} Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.019643 4747 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-marketplace-p986s" podStartSLOduration=3.001101808 podStartE2EDuration="6.019624486s" podCreationTimestamp="2025-12-02 17:53:44 +0000 UTC" firstStartedPulling="2025-12-02 17:53:45.888894572 +0000 UTC m=+4256.415783311" lastFinishedPulling="2025-12-02 17:53:48.90741724 +0000 UTC m=+4259.434305989" observedRunningTime="2025-12-02 17:53:50.015476568 +0000 UTC m=+4260.542365337" watchObservedRunningTime="2025-12-02 17:53:50.019624486 +0000 UTC m=+4260.546513235" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.319205 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_11c3b905-4c74-439b-b032-a3234c0bf501/rabbitmq/0.log" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.362149 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_29f33a45-0d0b-4654-879e-94098ab4b4c5/setup-container/0.log" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.380017 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_11c3b905-4c74-439b-b032-a3234c0bf501/setup-container/0.log" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.444652 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.444956 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.512588 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.745836 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_29f33a45-0d0b-4654-879e-94098ab4b4c5/rabbitmq/0.log" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.777947 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_29f33a45-0d0b-4654-879e-94098ab4b4c5/setup-container/0.log" Dec 02 17:53:50 crc kubenswrapper[4747]: I1202 17:53:50.789428 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-nqcqv_b7b40fdd-ba3d-48fa-b374-279795ee9a44/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.029648 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-dbk92_0fb63be9-f6ff-45ad-a564-6e43493ea683/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.034788 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.052220 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-5xr4m_a0a25e12-2008-4db8-9de5-9656b34976e0/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.278680 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-7sddt_4cd68ea5-c7c5-4cc2-9a6a-29d39a81c790/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.352522 4747 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-zfq7s_ee78ab8c-165e-4f44-9b66-d74f7aa4d397/ssh-known-hosts-edpm-deployment/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.481669 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57db9b8c79-f9rx5_41bc2800-e0ab-41cf-87a8-4fe9981de2e3/proxy-server/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.629065 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-57db9b8c79-f9rx5_41bc2800-e0ab-41cf-87a8-4fe9981de2e3/proxy-httpd/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.666754 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-mbqqh_f19a9557-2cbf-4fd9-a4a0-6f13ec2a572b/swift-ring-rebalance/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.810351 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-auditor/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.886966 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-reaper/0.log" Dec 02 17:53:51 crc kubenswrapper[4747]: I1202 17:53:51.929737 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-replicator/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.007582 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-auditor/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.018286 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/account-server/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.121718 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-replicator/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.154930 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-server/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.183410 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/container-updater/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.208867 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-auditor/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.306929 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-expirer/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.401526 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-server/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.413047 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-updater/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.417770 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/object-replicator/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.511960 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/rsync/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.653500 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_6251344f-5dcf-4cc8-ae89-85d58abaf41b/swift-recon-cron/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.654821 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-5zkpn_aaa35a26-230d-4226-a19b-776a48b1bf07/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.901027 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_d1805265-c562-4f13-85ad-330c827b78d6/test-operator-logs-container/0.log" Dec 02 17:53:52 crc kubenswrapper[4747]: I1202 17:53:52.917621 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_d7c276fe-92e7-4429-b6f8-d9488337b369/tempest-tests-tempest-tests-runner/0.log" Dec 02 17:53:53 crc kubenswrapper[4747]: I1202 17:53:53.039445 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5kj9j"] Dec 02 17:53:53 crc kubenswrapper[4747]: I1202 17:53:53.129673 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-l7fct_ca7ca1bc-1500-4246-b79c-6566ec5f0281/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 17:53:53 crc kubenswrapper[4747]: E1202 17:53:53.720738 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02c8e7a6_5bb4_4a2d_bc0d_2a3605dcb9dd.slice/crio-conmon-69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b.scope\": RecentStats: unable to find data in memory cache]" Dec 02 17:53:54 crc kubenswrapper[4747]: I1202 17:53:54.027642 4747 generic.go:334] "Generic (PLEG): container finished" podID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerID="69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b" exitCode=0 Dec 02 17:53:54 crc kubenswrapper[4747]: I1202 17:53:54.027946 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5kj9j" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="registry-server" containerID="cri-o://60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2" gracePeriod=2 Dec 02 17:53:54 crc kubenswrapper[4747]: I1202 17:53:54.028240 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkhsn" event={"ID":"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd","Type":"ContainerDied","Data":"69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b"} Dec 02 17:53:54 crc kubenswrapper[4747]: I1202 17:53:54.591934 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:54 crc kubenswrapper[4747]: I1202 17:53:54.592026 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:54 crc kubenswrapper[4747]: 
I1202 17:53:54.643590 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.001438 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.044822 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkhsn" event={"ID":"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd","Type":"ContainerStarted","Data":"143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9"} Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.052539 4747 generic.go:334] "Generic (PLEG): container finished" podID="655aff42-4a4b-493d-a928-04fe52e0feea" containerID="60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2" exitCode=0 Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.053350 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5kj9j" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.053506 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kj9j" event={"ID":"655aff42-4a4b-493d-a928-04fe52e0feea","Type":"ContainerDied","Data":"60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2"} Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.053533 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5kj9j" event={"ID":"655aff42-4a4b-493d-a928-04fe52e0feea","Type":"ContainerDied","Data":"f1b346f30ff5212d460a7c0dc44ff6b8516a75382efb353d75be8b5463d97706"} Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.053548 4747 scope.go:117] "RemoveContainer" containerID="60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.067961 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dkhsn" podStartSLOduration=3.527746398 podStartE2EDuration="11.067942223s" podCreationTimestamp="2025-12-02 17:53:44 +0000 UTC" firstStartedPulling="2025-12-02 17:53:46.916730126 +0000 UTC m=+4257.443618865" lastFinishedPulling="2025-12-02 17:53:54.456925941 +0000 UTC m=+4264.983814690" observedRunningTime="2025-12-02 17:53:55.059460972 +0000 UTC m=+4265.586349721" watchObservedRunningTime="2025-12-02 17:53:55.067942223 +0000 UTC m=+4265.594830972" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.083007 4747 scope.go:117] "RemoveContainer" containerID="66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.097513 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-utilities\") pod \"655aff42-4a4b-493d-a928-04fe52e0feea\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.097565 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-catalog-content\") pod \"655aff42-4a4b-493d-a928-04fe52e0feea\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.097637 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-mq7cz\" (UniqueName: \"kubernetes.io/projected/655aff42-4a4b-493d-a928-04fe52e0feea-kube-api-access-mq7cz\") pod \"655aff42-4a4b-493d-a928-04fe52e0feea\" (UID: \"655aff42-4a4b-493d-a928-04fe52e0feea\") " Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.100798 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-utilities" (OuterVolumeSpecName: "utilities") pod "655aff42-4a4b-493d-a928-04fe52e0feea" (UID: "655aff42-4a4b-493d-a928-04fe52e0feea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.106311 4747 scope.go:117] "RemoveContainer" containerID="b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.125929 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.136443 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/655aff42-4a4b-493d-a928-04fe52e0feea-kube-api-access-mq7cz" (OuterVolumeSpecName: "kube-api-access-mq7cz") pod "655aff42-4a4b-493d-a928-04fe52e0feea" (UID: "655aff42-4a4b-493d-a928-04fe52e0feea"). InnerVolumeSpecName "kube-api-access-mq7cz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.199302 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.199338 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq7cz\" (UniqueName: \"kubernetes.io/projected/655aff42-4a4b-493d-a928-04fe52e0feea-kube-api-access-mq7cz\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.202156 4747 scope.go:117] "RemoveContainer" containerID="60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2" Dec 02 17:53:55 crc kubenswrapper[4747]: E1202 17:53:55.204056 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2\": container with ID starting with 60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2 not found: ID does not exist" containerID="60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.204088 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2"} err="failed to get container status \"60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2\": rpc error: code = NotFound desc = could not find container \"60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2\": container with ID starting with 60ac3ced6b635231e7de853ca3acbc3e60f0c3f9a27a2620d0994c49c9e078d2 not found: ID does not exist" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.204123 4747 scope.go:117] "RemoveContainer" containerID="66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c" Dec 02 17:53:55 crc kubenswrapper[4747]: E1202 17:53:55.204513 
4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c\": container with ID starting with 66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c not found: ID does not exist" containerID="66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.204544 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c"} err="failed to get container status \"66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c\": rpc error: code = NotFound desc = could not find container \"66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c\": container with ID starting with 66abbb618caff754096dbe5d4bc9713f2e70f1b9c403e6ae6bf216a0a88cfe7c not found: ID does not exist" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.204559 4747 scope.go:117] "RemoveContainer" containerID="b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888" Dec 02 17:53:55 crc kubenswrapper[4747]: E1202 17:53:55.204879 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888\": container with ID starting with b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888 not found: ID does not exist" containerID="b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.204960 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888"} err="failed to get container status \"b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888\": rpc error: code = NotFound desc = could not find container \"b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888\": container with ID starting with b90309f236d4be17ad41fc311d26e30f835402a5cf6df765e136eeb4e7c48888 not found: ID does not exist" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.219100 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "655aff42-4a4b-493d-a928-04fe52e0feea" (UID: "655aff42-4a4b-493d-a928-04fe52e0feea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.225782 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.225836 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.300855 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/655aff42-4a4b-493d-a928-04fe52e0feea-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.394651 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5kj9j"] Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.415183 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5kj9j"] Dec 02 17:53:55 crc kubenswrapper[4747]: I1202 17:53:55.774883 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" path="/var/lib/kubelet/pods/655aff42-4a4b-493d-a928-04fe52e0feea/volumes" Dec 02 17:53:56 crc kubenswrapper[4747]: I1202 17:53:56.269308 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dkhsn" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="registry-server" probeResult="failure" output=< Dec 02 17:53:56 crc kubenswrapper[4747]: timeout: failed to connect service ":50051" within 1s Dec 02 17:53:56 crc kubenswrapper[4747]: > Dec 02 17:53:57 crc kubenswrapper[4747]: I1202 17:53:57.435378 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p986s"] Dec 02 17:53:57 crc kubenswrapper[4747]: I1202 17:53:57.435624 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p986s" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="registry-server" containerID="cri-o://041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7" gracePeriod=2 Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.044248 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.052688 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-catalog-content\") pod \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.052851 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-utilities\") pod \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.052994 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2pld\" (UniqueName: \"kubernetes.io/projected/8c7522fe-816b-4fe5-9b3e-7c083555c73b-kube-api-access-n2pld\") pod \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\" (UID: \"8c7522fe-816b-4fe5-9b3e-7c083555c73b\") " Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.053719 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-utilities" (OuterVolumeSpecName: "utilities") pod "8c7522fe-816b-4fe5-9b3e-7c083555c73b" (UID: "8c7522fe-816b-4fe5-9b3e-7c083555c73b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.066494 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c7522fe-816b-4fe5-9b3e-7c083555c73b-kube-api-access-n2pld" (OuterVolumeSpecName: "kube-api-access-n2pld") pod "8c7522fe-816b-4fe5-9b3e-7c083555c73b" (UID: "8c7522fe-816b-4fe5-9b3e-7c083555c73b"). InnerVolumeSpecName "kube-api-access-n2pld". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.073924 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c7522fe-816b-4fe5-9b3e-7c083555c73b" (UID: "8c7522fe-816b-4fe5-9b3e-7c083555c73b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.079835 4747 generic.go:334] "Generic (PLEG): container finished" podID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerID="041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7" exitCode=0 Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.079879 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p986s" event={"ID":"8c7522fe-816b-4fe5-9b3e-7c083555c73b","Type":"ContainerDied","Data":"041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7"} Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.079918 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p986s" event={"ID":"8c7522fe-816b-4fe5-9b3e-7c083555c73b","Type":"ContainerDied","Data":"d3b984be645e4b40967bee53282f9487d0b8ab594d2b4a668a0c8538e9f6a6e9"} Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.079938 4747 scope.go:117] "RemoveContainer" containerID="041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.080114 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p986s" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.121188 4747 scope.go:117] "RemoveContainer" containerID="a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.135946 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p986s"] Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.151745 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p986s"] Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.155231 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.155261 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2pld\" (UniqueName: \"kubernetes.io/projected/8c7522fe-816b-4fe5-9b3e-7c083555c73b-kube-api-access-n2pld\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.155271 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c7522fe-816b-4fe5-9b3e-7c083555c73b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.165213 4747 scope.go:117] "RemoveContainer" containerID="51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.190476 4747 scope.go:117] "RemoveContainer" containerID="041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7" Dec 02 17:53:58 crc kubenswrapper[4747]: E1202 17:53:58.190805 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7\": container with ID starting with 041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7 not found: ID does not exist" containerID="041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.190925 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7"} err="failed to get container status \"041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7\": rpc error: code = NotFound desc = could not find container \"041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7\": container with ID starting with 041b2b22aa9405261c9bf01ddfd8a7f753a59596bba8ca3f4093d2b22bae3cf7 not found: ID does not exist" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.191010 4747 scope.go:117] "RemoveContainer" containerID="a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa" Dec 02 17:53:58 crc kubenswrapper[4747]: E1202 17:53:58.192100 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa\": container with ID starting with a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa not found: ID does not exist" containerID="a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.192145 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa"} err="failed to get container status \"a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa\": rpc error: code = NotFound desc = could not find container \"a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa\": container with ID starting with a1ed87809b1e46a5f4b11ec244b491a507b04a1a8181fe2ddf061c9061976efa not found: ID does not exist" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.192173 4747 scope.go:117] "RemoveContainer" containerID="51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd" Dec 02 17:53:58 crc kubenswrapper[4747]: E1202 17:53:58.192480 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd\": container with ID starting with 51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd not found: ID does not exist" containerID="51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd" Dec 02 17:53:58 crc kubenswrapper[4747]: I1202 17:53:58.192521 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd"} err="failed to get container status \"51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd\": rpc error: code = NotFound desc = could not find container \"51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd\": container with ID starting with 51f5757a96e980947f0fed28d9f30209f49c89724425559ac73a62e7289d30dd not found: ID does not exist" Dec 02 17:53:59 crc kubenswrapper[4747]: I1202 17:53:59.772796 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" path="/var/lib/kubelet/pods/8c7522fe-816b-4fe5-9b3e-7c083555c73b/volumes" Dec 02 17:54:00 crc kubenswrapper[4747]: I1202 17:54:00.260713 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_11fbad36-d913-4a80-b1db-6f9707f8c370/memcached/0.log" Dec 02 17:54:05 crc kubenswrapper[4747]: I1202 17:54:05.302389 4747 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:54:05 crc kubenswrapper[4747]: I1202 17:54:05.385073 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:54:05 crc kubenswrapper[4747]: I1202 17:54:05.551952 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dkhsn"] Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.167372 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dkhsn" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="registry-server" containerID="cri-o://143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9" gracePeriod=2 Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.618636 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.634158 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-utilities\") pod \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.634239 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-catalog-content\") pod \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.634266 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9tt7\" (UniqueName: \"kubernetes.io/projected/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-kube-api-access-v9tt7\") pod \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\" (UID: \"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd\") " Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.635159 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-utilities" (OuterVolumeSpecName: "utilities") pod "02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" (UID: "02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.639865 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-kube-api-access-v9tt7" (OuterVolumeSpecName: "kube-api-access-v9tt7") pod "02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" (UID: "02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd"). InnerVolumeSpecName "kube-api-access-v9tt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.742013 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.742041 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9tt7\" (UniqueName: \"kubernetes.io/projected/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-kube-api-access-v9tt7\") on node \"crc\" DevicePath \"\"" Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.761780 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" (UID: "02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:54:07 crc kubenswrapper[4747]: I1202 17:54:07.843858 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.175975 4747 generic.go:334] "Generic (PLEG): container finished" podID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerID="143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9" exitCode=0 Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.176017 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkhsn" event={"ID":"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd","Type":"ContainerDied","Data":"143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9"} Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.176045 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkhsn" event={"ID":"02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd","Type":"ContainerDied","Data":"e5dc5e24f7294e9f46fb67600e97aff9ee71c1bea3d1b15e171d3551ea9b47a9"} Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.176062 4747 scope.go:117] "RemoveContainer" containerID="143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.176180 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dkhsn" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.196779 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dkhsn"] Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.201537 4747 scope.go:117] "RemoveContainer" containerID="69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.205457 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dkhsn"] Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.224728 4747 scope.go:117] "RemoveContainer" containerID="d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.267602 4747 scope.go:117] "RemoveContainer" containerID="143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9" Dec 02 17:54:08 crc kubenswrapper[4747]: E1202 17:54:08.268204 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9\": container with ID starting with 143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9 not found: ID does not exist" containerID="143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.268259 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9"} err="failed to get container status \"143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9\": rpc error: code = NotFound desc = could not find container \"143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9\": container with ID starting with 143f4c18c239bc842c1748ca5308b3ec8e1046b16446d72a56ed698a853317d9 not found: ID does not exist" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.268287 4747 scope.go:117] "RemoveContainer" containerID="69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b" Dec 02 17:54:08 crc kubenswrapper[4747]: E1202 17:54:08.268627 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b\": container with ID starting with 69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b not found: ID does not exist" containerID="69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.268719 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b"} err="failed to get container status \"69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b\": rpc error: code = NotFound desc = could not find container \"69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b\": container with ID starting with 69821d5aec31f62f28790f39148525901e0163f70890c8f31475cb55c534f85b not found: ID does not exist" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.268792 4747 scope.go:117] "RemoveContainer" containerID="d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4" Dec 02 17:54:08 crc kubenswrapper[4747]: E1202 17:54:08.269131 4747 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4\": container with ID starting with d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4 not found: ID does not exist" containerID="d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4" Dec 02 17:54:08 crc kubenswrapper[4747]: I1202 17:54:08.269207 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4"} err="failed to get container status \"d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4\": rpc error: code = NotFound desc = could not find container \"d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4\": container with ID starting with d03e02353f53bff27a97c3cd5bc469e6d16cdd4c45e9a3339a31425abecbbcb4 not found: ID does not exist" Dec 02 17:54:09 crc kubenswrapper[4747]: I1202 17:54:09.778798 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" path="/var/lib/kubelet/pods/02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd/volumes" Dec 02 17:54:20 crc kubenswrapper[4747]: I1202 17:54:20.789832 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/util/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.046854 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/pull/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.050694 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/pull/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.058503 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/util/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.218528 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/util/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.253870 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/extract/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.254529 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7d567a2bd9fd9d17b01bbe4f234e9733933a1b6d540a54874f3b21c158x4b2v_3755dfee-851d-47aa-95a3-85dc9da31e53/pull/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.392317 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-5bfbbb859d-5b75z_02a1a6c9-a064-447d-85b3-61d6de6bba1a/kube-rbac-proxy/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.486445 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-5bfbbb859d-5b75z_02a1a6c9-a064-447d-85b3-61d6de6bba1a/manager/0.log" Dec 02 17:54:21 crc 
kubenswrapper[4747]: I1202 17:54:21.518452 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748967c98-zjpdf_84349f11-8427-4aa8-ae24-750a6fdc5e78/kube-rbac-proxy/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.683604 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6788cc6d75-nsvrj_b1871043-c496-421e-8055-817652748d46/kube-rbac-proxy/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.715760 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748967c98-zjpdf_84349f11-8427-4aa8-ae24-750a6fdc5e78/manager/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.728512 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-6788cc6d75-nsvrj_b1871043-c496-421e-8055-817652748d46/manager/0.log" Dec 02 17:54:21 crc kubenswrapper[4747]: I1202 17:54:21.932289 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-85fbd69fcd-ns9p8_1a8a4d9e-ee5e-4235-bae4-23eb196dac78/kube-rbac-proxy/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.008103 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-85fbd69fcd-ns9p8_1a8a4d9e-ee5e-4235-bae4-23eb196dac78/manager/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.149300 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-698d6fd7d6-jhjgl_aac0c240-30e1-410d-bab3-b87965dbd297/kube-rbac-proxy/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.187854 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-698d6fd7d6-jhjgl_aac0c240-30e1-410d-bab3-b87965dbd297/manager/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.219862 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7d5d9fd47f-dhznh_dff222d7-fd30-4d3a-839a-6478da00ef65/kube-rbac-proxy/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.345192 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7d5d9fd47f-dhznh_dff222d7-fd30-4d3a-839a-6478da00ef65/manager/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.401898 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6c55d8d69b-f7k5g_eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb/kube-rbac-proxy/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.646110 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-54485f899-qjnsp_c637ccff-ec15-453e-9d0b-1e9d013f5f60/manager/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.652849 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6c55d8d69b-f7k5g_eb160d05-1b0b-4b75-ad5f-8c9efd2dbbbb/manager/0.log" Dec 02 17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.668120 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-54485f899-qjnsp_c637ccff-ec15-453e-9d0b-1e9d013f5f60/kube-rbac-proxy/0.log" Dec 02 
17:54:22 crc kubenswrapper[4747]: I1202 17:54:22.998561 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-79cc9d59f5-ftf2c_3e91b7b4-9289-4769-83a7-4cd35038aaad/kube-rbac-proxy/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.107421 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-79cc9d59f5-ftf2c_3e91b7b4-9289-4769-83a7-4cd35038aaad/manager/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.184572 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5cbc8c7f96-f2qv7_3685e169-7bca-47ae-b5bc-5945db4fa054/kube-rbac-proxy/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.306589 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5cbc8c7f96-f2qv7_3685e169-7bca-47ae-b5bc-5945db4fa054/manager/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.330023 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-64d7c556cd-vkmkn_3f797f20-b787-4429-a862-badf66ed38ea/kube-rbac-proxy/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.396262 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-64d7c556cd-vkmkn_3f797f20-b787-4429-a862-badf66ed38ea/manager/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.484542 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-58879495c-wg54q_f5edfca9-f892-409d-856c-70e757072464/kube-rbac-proxy/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.586521 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-58879495c-wg54q_f5edfca9-f892-409d-856c-70e757072464/manager/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.654073 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79d658b66d-tkgmm_11014c8f-0b84-470f-aaf8-0d029800d594/kube-rbac-proxy/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.739300 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-79d658b66d-tkgmm_11014c8f-0b84-470f-aaf8-0d029800d594/manager/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.817272 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-d5fb87cb8-8z6z7_93ff580d-18b2-4e1f-af0b-f2bd36b1e0db/kube-rbac-proxy/0.log" Dec 02 17:54:23 crc kubenswrapper[4747]: I1202 17:54:23.875192 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-d5fb87cb8-8z6z7_93ff580d-18b2-4e1f-af0b-f2bd36b1e0db/manager/0.log" Dec 02 17:54:24 crc kubenswrapper[4747]: I1202 17:54:24.002782 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77868f484-n5tnz_50fe2539-78c5-4fde-9554-30143fdc520f/kube-rbac-proxy/0.log" Dec 02 17:54:24 crc kubenswrapper[4747]: I1202 17:54:24.012624 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-77868f484-n5tnz_50fe2539-78c5-4fde-9554-30143fdc520f/manager/0.log" Dec 02 17:54:24 crc kubenswrapper[4747]: I1202 17:54:24.134881 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78568b558-x86lp_882ad3dc-b8df-4107-855a-bdf7dc10786b/kube-rbac-proxy/0.log" Dec 02 17:54:24 crc kubenswrapper[4747]: I1202 17:54:24.907782 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5d4dbc7dd5-9bkrr_8983941a-65e0-4b84-9b02-38dc34133b0f/kube-rbac-proxy/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.054810 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5d4dbc7dd5-9bkrr_8983941a-65e0-4b84-9b02-38dc34133b0f/operator/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.143539 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-br7nx_17f243ad-ab56-4cd4-9c45-1c665320e93d/registry-server/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.215297 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5b67cfc8fb-j9r8v_d539442d-27c5-4383-9a11-589905951e21/kube-rbac-proxy/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.366177 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5b67cfc8fb-j9r8v_d539442d-27c5-4383-9a11-589905951e21/manager/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.412471 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-867d87977b-kpccv_c8dc257d-bb42-43c0-b784-483ccb97f95f/kube-rbac-proxy/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.482751 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-867d87977b-kpccv_c8dc257d-bb42-43c0-b784-483ccb97f95f/manager/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.540646 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-rp4gg_75bdef4f-9dfa-4699-9cea-b2804869c8ef/operator/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.678365 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-8f6687c44-lkxqf_29996641-038c-4bb4-8ed8-4cc853ab4369/kube-rbac-proxy/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.809354 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-8f6687c44-lkxqf_29996641-038c-4bb4-8ed8-4cc853ab4369/manager/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.814819 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-78568b558-x86lp_882ad3dc-b8df-4107-855a-bdf7dc10786b/manager/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.893048 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-695797c565-mwsvx_6ea9539a-d252-4870-bdbf-4bc6d033840c/kube-rbac-proxy/0.log" Dec 02 17:54:25 crc kubenswrapper[4747]: I1202 17:54:25.957133 4747 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-695797c565-mwsvx_6ea9539a-d252-4870-bdbf-4bc6d033840c/manager/0.log" Dec 02 17:54:26 crc kubenswrapper[4747]: I1202 17:54:26.009637 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-bb86466d8-zzcts_e1603c18-0e89-40a6-bb71-549cd8db07c6/kube-rbac-proxy/0.log" Dec 02 17:54:26 crc kubenswrapper[4747]: I1202 17:54:26.010771 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-bb86466d8-zzcts_e1603c18-0e89-40a6-bb71-549cd8db07c6/manager/0.log" Dec 02 17:54:27 crc kubenswrapper[4747]: I1202 17:54:27.441669 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b56b8849f-swpq5_1183ccf7-36cd-41ee-96d7-cb7272989af0/kube-rbac-proxy/0.log" Dec 02 17:54:27 crc kubenswrapper[4747]: I1202 17:54:27.490420 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6b56b8849f-swpq5_1183ccf7-36cd-41ee-96d7-cb7272989af0/manager/0.log" Dec 02 17:54:31 crc kubenswrapper[4747]: I1202 17:54:31.795071 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:54:31 crc kubenswrapper[4747]: I1202 17:54:31.795622 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:54:45 crc kubenswrapper[4747]: I1202 17:54:45.954350 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-2lxh4_2f1dd59a-4931-4cc0-8105-f036e8e2f72f/control-plane-machine-set-operator/0.log" Dec 02 17:54:46 crc kubenswrapper[4747]: I1202 17:54:46.133621 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dn6td_9330ef08-f76f-4166-a6c0-a3275375c9f1/machine-api-operator/0.log" Dec 02 17:54:46 crc kubenswrapper[4747]: I1202 17:54:46.137257 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dn6td_9330ef08-f76f-4166-a6c0-a3275375c9f1/kube-rbac-proxy/0.log" Dec 02 17:55:01 crc kubenswrapper[4747]: I1202 17:55:01.387488 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-b9r5p_1e49c6c3-130a-496a-88ae-6c4f9e1c8ea9/cert-manager-controller/0.log" Dec 02 17:55:01 crc kubenswrapper[4747]: I1202 17:55:01.506884 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-vr84r_2c58e5fb-8d6a-4102-9191-ca69fb40e3bd/cert-manager-cainjector/0.log" Dec 02 17:55:01 crc kubenswrapper[4747]: I1202 17:55:01.566823 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-qb975_9ee35e2a-2967-4c76-92ec-2a7b5db87ba7/cert-manager-webhook/0.log" Dec 02 17:55:01 crc kubenswrapper[4747]: I1202 17:55:01.794830 4747 patch_prober.go:28] interesting 
pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:55:01 crc kubenswrapper[4747]: I1202 17:55:01.795159 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:55:16 crc kubenswrapper[4747]: I1202 17:55:16.264728 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-c7bcn_2b678030-cb69-4a68-ab2f-140d36283f19/nmstate-console-plugin/0.log" Dec 02 17:55:16 crc kubenswrapper[4747]: I1202 17:55:16.443462 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-f556t_cb597834-dde7-4bcb-b267-22aae223b524/kube-rbac-proxy/0.log" Dec 02 17:55:16 crc kubenswrapper[4747]: I1202 17:55:16.471114 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-f556t_cb597834-dde7-4bcb-b267-22aae223b524/nmstate-metrics/0.log" Dec 02 17:55:16 crc kubenswrapper[4747]: I1202 17:55:16.480111 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-pj45b_00a29ad2-51bd-409c-afc3-ccd42b113c68/nmstate-handler/0.log" Dec 02 17:55:16 crc kubenswrapper[4747]: I1202 17:55:16.687015 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-8sr7z_b1fa5588-eede-4bb2-8b51-98f2582557bd/nmstate-operator/0.log" Dec 02 17:55:16 crc kubenswrapper[4747]: I1202 17:55:16.701011 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-lxrq8_6659cefd-449d-4eb0-a02e-d118b586ebac/nmstate-webhook/0.log" Dec 02 17:55:31 crc kubenswrapper[4747]: I1202 17:55:31.795378 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 17:55:31 crc kubenswrapper[4747]: I1202 17:55:31.796073 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 17:55:31 crc kubenswrapper[4747]: I1202 17:55:31.796144 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" Dec 02 17:55:31 crc kubenswrapper[4747]: I1202 17:55:31.797165 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1d8f0c8a99109d44b7610ef8a9fe27391714d9f2d4fa74ebb42d42c2897c3ea2"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 17:55:31 crc kubenswrapper[4747]: I1202 17:55:31.797279 4747 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://1d8f0c8a99109d44b7610ef8a9fe27391714d9f2d4fa74ebb42d42c2897c3ea2" gracePeriod=600 Dec 02 17:55:32 crc kubenswrapper[4747]: I1202 17:55:32.665623 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="1d8f0c8a99109d44b7610ef8a9fe27391714d9f2d4fa74ebb42d42c2897c3ea2" exitCode=0 Dec 02 17:55:32 crc kubenswrapper[4747]: I1202 17:55:32.665834 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"1d8f0c8a99109d44b7610ef8a9fe27391714d9f2d4fa74ebb42d42c2897c3ea2"} Dec 02 17:55:32 crc kubenswrapper[4747]: I1202 17:55:32.666091 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerStarted","Data":"7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"} Dec 02 17:55:32 crc kubenswrapper[4747]: I1202 17:55:32.666108 4747 scope.go:117] "RemoveContainer" containerID="6942ee9095fbc72ba897a72a1f9a3f8da23a8e31e393ecfecacc43d23fd1c511" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.380735 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-hkpjf_3dd4af5f-feb2-49f2-ade6-35794a19f8b8/kube-rbac-proxy/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.545700 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-hkpjf_3dd4af5f-feb2-49f2-ade6-35794a19f8b8/controller/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.552183 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.769795 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.773772 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.795371 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.795968 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.922694 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.955742 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.965353 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log" Dec 02 17:55:33 crc kubenswrapper[4747]: I1202 17:55:33.968751 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.165496 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-frr-files/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.178184 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-metrics/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.190511 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/cp-reloader/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.211512 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/controller/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.338832 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/frr-metrics/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.382164 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/kube-rbac-proxy/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.413685 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/kube-rbac-proxy-frr/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.536741 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/reloader/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.649833 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-llx6b_35b75b4b-4452-4e1b-8571-fbafb78b130b/frr-k8s-webhook-server/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.875811 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-98db7dd6f-hftd4_5e16dbd4-9db6-47d1-bfc3-549dbb8067e9/manager/0.log" Dec 02 17:55:34 crc kubenswrapper[4747]: I1202 17:55:34.967424 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5448bdf48d-p2qrm_c15d13db-952d-4732-9d31-9ba5e926796c/webhook-server/0.log" Dec 02 17:55:35 crc kubenswrapper[4747]: I1202 17:55:35.104995 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7vlkv_b7db8655-8adb-4345-b69a-574f2fbbffcb/kube-rbac-proxy/0.log" Dec 02 17:55:35 crc kubenswrapper[4747]: I1202 17:55:35.536603 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7vlkv_b7db8655-8adb-4345-b69a-574f2fbbffcb/speaker/0.log" Dec 02 17:55:35 crc kubenswrapper[4747]: I1202 17:55:35.698686 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p4tng_b1b3c8a9-1af8-495f-9e0b-397c645925bd/frr/0.log" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.177959 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-vwlbb"] Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180770 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="extract-content" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180789 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="extract-content" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180799 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="extract-utilities" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180806 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="extract-utilities" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180814 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="extract-utilities" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180820 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="extract-utilities" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180830 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="extract-utilities" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180836 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="extract-utilities" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180843 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180849 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180861 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="extract-content" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180867 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="extract-content" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180883 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180889 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180920 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180926 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: E1202 17:55:51.180939 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="extract-content" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.180945 4747 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="extract-content" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.183054 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c8e7a6-5bb4-4a2d-bc0d-2a3605dcb9dd" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.183089 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="655aff42-4a4b-493d-a928-04fe52e0feea" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.183119 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c7522fe-816b-4fe5-9b3e-7c083555c73b" containerName="registry-server" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.196084 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.261231 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vwlbb"] Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.273620 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vkqx\" (UniqueName: \"kubernetes.io/projected/514b35ec-b107-409a-a19d-dd3f2bdbb575-kube-api-access-8vkqx\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.273694 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-catalog-content\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.273816 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-utilities\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.359949 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/util/0.log" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.374673 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vkqx\" (UniqueName: \"kubernetes.io/projected/514b35ec-b107-409a-a19d-dd3f2bdbb575-kube-api-access-8vkqx\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.374723 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-catalog-content\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.374794 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-utilities\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.375258 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-utilities\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.375332 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-catalog-content\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.397207 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vkqx\" (UniqueName: \"kubernetes.io/projected/514b35ec-b107-409a-a19d-dd3f2bdbb575-kube-api-access-8vkqx\") pod \"community-operators-vwlbb\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.554008 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.594745 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/pull/0.log" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.597218 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/util/0.log" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.858389 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/pull/0.log" Dec 02 17:55:51 crc kubenswrapper[4747]: I1202 17:55:51.977373 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/util/0.log" Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.066735 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vwlbb"] Dec 02 17:55:52 crc kubenswrapper[4747]: W1202 17:55:52.425693 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod514b35ec_b107_409a_a19d_dd3f2bdbb575.slice/crio-17c5dba876081e8348c9cb004b4f7dcd75cb6247e3e9c8fe45fa2d9f077da529 WatchSource:0}: Error finding container 17c5dba876081e8348c9cb004b4f7dcd75cb6247e3e9c8fe45fa2d9f077da529: Status 404 returned error can't find the container with id 17c5dba876081e8348c9cb004b4f7dcd75cb6247e3e9c8fe45fa2d9f077da529 Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.624925 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/pull/0.log" Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.678287 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/util/0.log" Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.688497 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f2x8xs_b34ed569-9800-4521-bbd9-7e4249513755/extract/0.log" Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.862623 4747 generic.go:334] "Generic (PLEG): container finished" podID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerID="2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2" exitCode=0 Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.862664 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwlbb" event={"ID":"514b35ec-b107-409a-a19d-dd3f2bdbb575","Type":"ContainerDied","Data":"2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2"} Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.862689 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwlbb" event={"ID":"514b35ec-b107-409a-a19d-dd3f2bdbb575","Type":"ContainerStarted","Data":"17c5dba876081e8348c9cb004b4f7dcd75cb6247e3e9c8fe45fa2d9f077da529"} Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.877205 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/util/0.log" Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.898979 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/pull/0.log" Dec 02 17:55:52 crc kubenswrapper[4747]: I1202 17:55:52.912893 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/pull/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.063816 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/util/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.098318 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/pull/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.102245 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vr7fd_0309c5e1-1f38-47c3-adc5-553675c969a8/extract/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.269488 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-utilities/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.430448 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-utilities/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.475833 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-content/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.493276 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-content/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.616683 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-utilities/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.751899 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/extract-content/0.log" Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.872709 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwlbb" event={"ID":"514b35ec-b107-409a-a19d-dd3f2bdbb575","Type":"ContainerStarted","Data":"3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0"} Dec 02 17:55:53 crc kubenswrapper[4747]: I1202 17:55:53.908607 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-utilities/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.086987 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-content/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.137898 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-utilities/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.177431 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-content/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.234855 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-wfvnl_233bbc8c-b118-4875-bc52-e626d7410c69/registry-server/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.306577 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-utilities/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.369663 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/extract-content/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.543773 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-2kmf2_dd32d5f2-ea95-4e1f-91f5-3e245c961bd6/marketplace-operator/0.log" Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.608398 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-utilities/0.log" Dec 02 17:55:54 crc 
kubenswrapper[4747]: I1202 17:55:54.885209 4747 generic.go:334] "Generic (PLEG): container finished" podID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerID="3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0" exitCode=0 Dec 02 17:55:54 crc kubenswrapper[4747]: I1202 17:55:54.885441 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwlbb" event={"ID":"514b35ec-b107-409a-a19d-dd3f2bdbb575","Type":"ContainerDied","Data":"3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0"} Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.002137 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xlkb2_0ba8abe4-42dc-4c6d-93fa-4ed196ff9105/registry-server/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.054509 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-content/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.102738 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-utilities/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.132164 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-content/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.279968 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-utilities/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.378367 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/extract-content/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.400317 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-utilities/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.564431 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-utilities/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.612781 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-content/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.627769 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-2ptxc_7bb1d09f-243e-488b-af49-e7836ff452c8/registry-server/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.641765 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-content/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.790022 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-utilities/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.799042 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/extract-content/0.log" Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.898839 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwlbb" event={"ID":"514b35ec-b107-409a-a19d-dd3f2bdbb575","Type":"ContainerStarted","Data":"633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2"} Dec 02 17:55:55 crc kubenswrapper[4747]: I1202 17:55:55.930874 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vwlbb" podStartSLOduration=2.220949711 podStartE2EDuration="4.930779856s" podCreationTimestamp="2025-12-02 17:55:51 +0000 UTC" firstStartedPulling="2025-12-02 17:55:52.86417057 +0000 UTC m=+4383.391059319" lastFinishedPulling="2025-12-02 17:55:55.574000715 +0000 UTC m=+4386.100889464" observedRunningTime="2025-12-02 17:55:55.923132199 +0000 UTC m=+4386.450020958" watchObservedRunningTime="2025-12-02 17:55:55.930779856 +0000 UTC m=+4386.457668605" Dec 02 17:55:56 crc kubenswrapper[4747]: I1202 17:55:56.262331 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-p55sf_922bf4c0-8c15-43f0-a6f7-09601df7efe0/registry-server/0.log" Dec 02 17:56:01 crc kubenswrapper[4747]: I1202 17:56:01.554217 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:56:01 crc kubenswrapper[4747]: I1202 17:56:01.554576 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:56:01 crc kubenswrapper[4747]: I1202 17:56:01.616723 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:56:02 crc kubenswrapper[4747]: I1202 17:56:02.040700 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:56:02 crc kubenswrapper[4747]: I1202 17:56:02.118065 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vwlbb"] Dec 02 17:56:03 crc kubenswrapper[4747]: I1202 17:56:03.992959 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vwlbb" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="registry-server" containerID="cri-o://633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2" gracePeriod=2 Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.498528 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.658574 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vkqx\" (UniqueName: \"kubernetes.io/projected/514b35ec-b107-409a-a19d-dd3f2bdbb575-kube-api-access-8vkqx\") pod \"514b35ec-b107-409a-a19d-dd3f2bdbb575\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.658662 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-utilities\") pod \"514b35ec-b107-409a-a19d-dd3f2bdbb575\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.658848 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-catalog-content\") pod \"514b35ec-b107-409a-a19d-dd3f2bdbb575\" (UID: \"514b35ec-b107-409a-a19d-dd3f2bdbb575\") " Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.660206 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-utilities" (OuterVolumeSpecName: "utilities") pod "514b35ec-b107-409a-a19d-dd3f2bdbb575" (UID: "514b35ec-b107-409a-a19d-dd3f2bdbb575"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.677448 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/514b35ec-b107-409a-a19d-dd3f2bdbb575-kube-api-access-8vkqx" (OuterVolumeSpecName: "kube-api-access-8vkqx") pod "514b35ec-b107-409a-a19d-dd3f2bdbb575" (UID: "514b35ec-b107-409a-a19d-dd3f2bdbb575"). InnerVolumeSpecName "kube-api-access-8vkqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.727682 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "514b35ec-b107-409a-a19d-dd3f2bdbb575" (UID: "514b35ec-b107-409a-a19d-dd3f2bdbb575"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.761592 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.761839 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vkqx\" (UniqueName: \"kubernetes.io/projected/514b35ec-b107-409a-a19d-dd3f2bdbb575-kube-api-access-8vkqx\") on node \"crc\" DevicePath \"\"" Dec 02 17:56:04 crc kubenswrapper[4747]: I1202 17:56:04.762075 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/514b35ec-b107-409a-a19d-dd3f2bdbb575-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.008012 4747 generic.go:334] "Generic (PLEG): container finished" podID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerID="633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2" exitCode=0 Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.008122 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwlbb" event={"ID":"514b35ec-b107-409a-a19d-dd3f2bdbb575","Type":"ContainerDied","Data":"633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2"} Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.008195 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vwlbb" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.008896 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwlbb" event={"ID":"514b35ec-b107-409a-a19d-dd3f2bdbb575","Type":"ContainerDied","Data":"17c5dba876081e8348c9cb004b4f7dcd75cb6247e3e9c8fe45fa2d9f077da529"} Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.009040 4747 scope.go:117] "RemoveContainer" containerID="633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.043022 4747 scope.go:117] "RemoveContainer" containerID="3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.056718 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vwlbb"] Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.069818 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vwlbb"] Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.084845 4747 scope.go:117] "RemoveContainer" containerID="2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.124256 4747 scope.go:117] "RemoveContainer" containerID="633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2" Dec 02 17:56:05 crc kubenswrapper[4747]: E1202 17:56:05.124731 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2\": container with ID starting with 633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2 not found: ID does not exist" containerID="633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.124966 
4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2"} err="failed to get container status \"633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2\": rpc error: code = NotFound desc = could not find container \"633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2\": container with ID starting with 633bc240d484ac846baae4918ccce0f9db399c65f203d1c22c8fd3663466d1c2 not found: ID does not exist" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.125167 4747 scope.go:117] "RemoveContainer" containerID="3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0" Dec 02 17:56:05 crc kubenswrapper[4747]: E1202 17:56:05.126279 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0\": container with ID starting with 3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0 not found: ID does not exist" containerID="3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.126347 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0"} err="failed to get container status \"3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0\": rpc error: code = NotFound desc = could not find container \"3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0\": container with ID starting with 3264f88989a7421a8756dff21a35c2c84514b9c785666bb1eb0c6692f11f14f0 not found: ID does not exist" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.126390 4747 scope.go:117] "RemoveContainer" containerID="2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2" Dec 02 17:56:05 crc kubenswrapper[4747]: E1202 17:56:05.126822 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2\": container with ID starting with 2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2 not found: ID does not exist" containerID="2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.126863 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2"} err="failed to get container status \"2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2\": rpc error: code = NotFound desc = could not find container \"2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2\": container with ID starting with 2d3413f327a7ba32331668ae16d63f710b95417ca869179cdd423c1801170bd2 not found: ID does not exist" Dec 02 17:56:05 crc kubenswrapper[4747]: I1202 17:56:05.772020 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" path="/var/lib/kubelet/pods/514b35ec-b107-409a-a19d-dd3f2bdbb575/volumes" Dec 02 17:57:39 crc kubenswrapper[4747]: I1202 17:57:39.272454 4747 generic.go:334] "Generic (PLEG): container finished" podID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerID="619d50729b6eb2a933a45f4ff1f0eb15025c6019073455c702e8c4fcef5f7c39" exitCode=0 Dec 02 17:57:39 crc kubenswrapper[4747]: 
I1202 17:57:39.272572 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-8rdk7/must-gather-8vrmn" event={"ID":"32dfc24d-44c9-4a76-aecf-76e86bc57bdd","Type":"ContainerDied","Data":"619d50729b6eb2a933a45f4ff1f0eb15025c6019073455c702e8c4fcef5f7c39"}
Dec 02 17:57:39 crc kubenswrapper[4747]: I1202 17:57:39.273479 4747 scope.go:117] "RemoveContainer" containerID="619d50729b6eb2a933a45f4ff1f0eb15025c6019073455c702e8c4fcef5f7c39"
Dec 02 17:57:39 crc kubenswrapper[4747]: I1202 17:57:39.940167 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8rdk7_must-gather-8vrmn_32dfc24d-44c9-4a76-aecf-76e86bc57bdd/gather/0.log"
Dec 02 17:57:51 crc kubenswrapper[4747]: I1202 17:57:51.165968 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-8rdk7/must-gather-8vrmn"]
Dec 02 17:57:51 crc kubenswrapper[4747]: I1202 17:57:51.166861 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-8rdk7/must-gather-8vrmn" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerName="copy" containerID="cri-o://afdef2bd739c4f494dd07d3afcc124149a6542b21f14af17f82379d52cfb55c6" gracePeriod=2
Dec 02 17:57:51 crc kubenswrapper[4747]: I1202 17:57:51.182833 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-8rdk7/must-gather-8vrmn"]
Dec 02 17:57:51 crc kubenswrapper[4747]: I1202 17:57:51.423854 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8rdk7_must-gather-8vrmn_32dfc24d-44c9-4a76-aecf-76e86bc57bdd/copy/0.log"
Dec 02 17:57:51 crc kubenswrapper[4747]: I1202 17:57:51.424567 4747 generic.go:334] "Generic (PLEG): container finished" podID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerID="afdef2bd739c4f494dd07d3afcc124149a6542b21f14af17f82379d52cfb55c6" exitCode=143
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.007329 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8rdk7_must-gather-8vrmn_32dfc24d-44c9-4a76-aecf-76e86bc57bdd/copy/0.log"
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.008037 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.126578 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-must-gather-output\") pod \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") "
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.142272 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b9f2\" (UniqueName: \"kubernetes.io/projected/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-kube-api-access-2b9f2\") pod \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\" (UID: \"32dfc24d-44c9-4a76-aecf-76e86bc57bdd\") "
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.153831 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-kube-api-access-2b9f2" (OuterVolumeSpecName: "kube-api-access-2b9f2") pod "32dfc24d-44c9-4a76-aecf-76e86bc57bdd" (UID: "32dfc24d-44c9-4a76-aecf-76e86bc57bdd"). InnerVolumeSpecName "kube-api-access-2b9f2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.245453 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b9f2\" (UniqueName: \"kubernetes.io/projected/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-kube-api-access-2b9f2\") on node \"crc\" DevicePath \"\""
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.267849 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "32dfc24d-44c9-4a76-aecf-76e86bc57bdd" (UID: "32dfc24d-44c9-4a76-aecf-76e86bc57bdd"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.346601 4747 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/32dfc24d-44c9-4a76-aecf-76e86bc57bdd-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.435602 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-8rdk7_must-gather-8vrmn_32dfc24d-44c9-4a76-aecf-76e86bc57bdd/copy/0.log"
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.436049 4747 scope.go:117] "RemoveContainer" containerID="afdef2bd739c4f494dd07d3afcc124149a6542b21f14af17f82379d52cfb55c6"
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.436211 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-8rdk7/must-gather-8vrmn"
Dec 02 17:57:52 crc kubenswrapper[4747]: I1202 17:57:52.485248 4747 scope.go:117] "RemoveContainer" containerID="619d50729b6eb2a933a45f4ff1f0eb15025c6019073455c702e8c4fcef5f7c39"
Dec 02 17:57:53 crc kubenswrapper[4747]: I1202 17:57:53.774712 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" path="/var/lib/kubelet/pods/32dfc24d-44c9-4a76-aecf-76e86bc57bdd/volumes"
Dec 02 17:58:01 crc kubenswrapper[4747]: I1202 17:58:01.795466 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:58:01 crc kubenswrapper[4747]: I1202 17:58:01.796158 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:58:31 crc kubenswrapper[4747]: I1202 17:58:31.794726 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:58:31 crc kubenswrapper[4747]: I1202 17:58:31.795244 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:59:01 crc kubenswrapper[4747]: I1202 17:59:01.794890 4747 patch_prober.go:28] interesting pod/machine-config-daemon-m5zcc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 02 17:59:01 crc kubenswrapper[4747]: I1202 17:59:01.795580 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 02 17:59:01 crc kubenswrapper[4747]: I1202 17:59:01.795685 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc"
Dec 02 17:59:01 crc kubenswrapper[4747]: I1202 17:59:01.796814 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"} pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 02 17:59:01 crc kubenswrapper[4747]: I1202 17:59:01.796946 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerName="machine-config-daemon" containerID="cri-o://7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77" gracePeriod=600
Dec 02 17:59:01 crc kubenswrapper[4747]: E1202 17:59:01.939656 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:59:02 crc kubenswrapper[4747]: I1202 17:59:02.329306 4747 generic.go:334] "Generic (PLEG): container finished" podID="444e1e59-7bc7-44cd-bb37-ed903442b724" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77" exitCode=0
Dec 02 17:59:02 crc kubenswrapper[4747]: I1202 17:59:02.329374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" event={"ID":"444e1e59-7bc7-44cd-bb37-ed903442b724","Type":"ContainerDied","Data":"7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"}
Dec 02 17:59:02 crc kubenswrapper[4747]: I1202 17:59:02.329436 4747 scope.go:117] "RemoveContainer" containerID="1d8f0c8a99109d44b7610ef8a9fe27391714d9f2d4fa74ebb42d42c2897c3ea2"
Dec 02 17:59:02 crc kubenswrapper[4747]: I1202 17:59:02.330307 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 17:59:02 crc kubenswrapper[4747]: E1202 17:59:02.330771 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:59:14 crc kubenswrapper[4747]: I1202 17:59:14.760613 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 17:59:14 crc kubenswrapper[4747]: E1202 17:59:14.761563 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:59:28 crc kubenswrapper[4747]: I1202 17:59:28.761701 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 17:59:28 crc kubenswrapper[4747]: E1202 17:59:28.762851 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:59:42 crc kubenswrapper[4747]: I1202 17:59:42.764567 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 17:59:42 crc kubenswrapper[4747]: E1202 17:59:42.765811 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 17:59:54 crc kubenswrapper[4747]: I1202 17:59:54.761504 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 17:59:54 crc kubenswrapper[4747]: E1202 17:59:54.762469 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.210232 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"]
Dec 02 18:00:00 crc kubenswrapper[4747]: E1202 18:00:00.211217 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="registry-server"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211234 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="registry-server"
Dec 02 18:00:00 crc kubenswrapper[4747]: E1202 18:00:00.211251 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerName="gather"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211259 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerName="gather"
Dec 02 18:00:00 crc kubenswrapper[4747]: E1202 18:00:00.211275 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerName="copy"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211285 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerName="copy"
Dec 02 18:00:00 crc kubenswrapper[4747]: E1202 18:00:00.211298 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="extract-content"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211306 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="extract-content"
Dec 02 18:00:00 crc kubenswrapper[4747]: E1202 18:00:00.211321 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="extract-utilities"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211329 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="extract-utilities"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211558 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="514b35ec-b107-409a-a19d-dd3f2bdbb575" containerName="registry-server"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211596 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerName="gather"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.211612 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="32dfc24d-44c9-4a76-aecf-76e86bc57bdd" containerName="copy"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.212532 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.214639 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.215469 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.231798 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"]
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.408765 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/730231d5-3e51-4863-98a3-6b441542102b-config-volume\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.408976 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2znb\" (UniqueName: \"kubernetes.io/projected/730231d5-3e51-4863-98a3-6b441542102b-kube-api-access-l2znb\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.409344 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/730231d5-3e51-4863-98a3-6b441542102b-secret-volume\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.511088 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2znb\" (UniqueName: \"kubernetes.io/projected/730231d5-3e51-4863-98a3-6b441542102b-kube-api-access-l2znb\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.511330 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/730231d5-3e51-4863-98a3-6b441542102b-secret-volume\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.511534 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/730231d5-3e51-4863-98a3-6b441542102b-config-volume\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.513403 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/730231d5-3e51-4863-98a3-6b441542102b-config-volume\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.522238 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/730231d5-3e51-4863-98a3-6b441542102b-secret-volume\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.544726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2znb\" (UniqueName: \"kubernetes.io/projected/730231d5-3e51-4863-98a3-6b441542102b-kube-api-access-l2znb\") pod \"collect-profiles-29411640-mvj4r\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:00 crc kubenswrapper[4747]: I1202 18:00:00.553611 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:01 crc kubenswrapper[4747]: I1202 18:00:01.058376 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"]
Dec 02 18:00:02 crc kubenswrapper[4747]: I1202 18:00:02.065250 4747 generic.go:334] "Generic (PLEG): container finished" podID="730231d5-3e51-4863-98a3-6b441542102b" containerID="1a92c419fb3bde51df2b25d0270d5a039da4ce54be32d965be4d0faa84c732da" exitCode=0
Dec 02 18:00:02 crc kubenswrapper[4747]: I1202 18:00:02.065700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r" event={"ID":"730231d5-3e51-4863-98a3-6b441542102b","Type":"ContainerDied","Data":"1a92c419fb3bde51df2b25d0270d5a039da4ce54be32d965be4d0faa84c732da"}
Dec 02 18:00:02 crc kubenswrapper[4747]: I1202 18:00:02.065752 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r" event={"ID":"730231d5-3e51-4863-98a3-6b441542102b","Type":"ContainerStarted","Data":"92621ece66dd0c6bd92824a06ac54038919bca1a01fa1bb717d2c29d8b2b535e"}
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.509463 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.685597 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2znb\" (UniqueName: \"kubernetes.io/projected/730231d5-3e51-4863-98a3-6b441542102b-kube-api-access-l2znb\") pod \"730231d5-3e51-4863-98a3-6b441542102b\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") "
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.685938 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/730231d5-3e51-4863-98a3-6b441542102b-config-volume\") pod \"730231d5-3e51-4863-98a3-6b441542102b\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") "
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.685991 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/730231d5-3e51-4863-98a3-6b441542102b-secret-volume\") pod \"730231d5-3e51-4863-98a3-6b441542102b\" (UID: \"730231d5-3e51-4863-98a3-6b441542102b\") "
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.688081 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/730231d5-3e51-4863-98a3-6b441542102b-config-volume" (OuterVolumeSpecName: "config-volume") pod "730231d5-3e51-4863-98a3-6b441542102b" (UID: "730231d5-3e51-4863-98a3-6b441542102b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.691723 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/730231d5-3e51-4863-98a3-6b441542102b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "730231d5-3e51-4863-98a3-6b441542102b" (UID: "730231d5-3e51-4863-98a3-6b441542102b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.694426 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/730231d5-3e51-4863-98a3-6b441542102b-kube-api-access-l2znb" (OuterVolumeSpecName: "kube-api-access-l2znb") pod "730231d5-3e51-4863-98a3-6b441542102b" (UID: "730231d5-3e51-4863-98a3-6b441542102b"). InnerVolumeSpecName "kube-api-access-l2znb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.788186 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/730231d5-3e51-4863-98a3-6b441542102b-config-volume\") on node \"crc\" DevicePath \"\""
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.788230 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/730231d5-3e51-4863-98a3-6b441542102b-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 02 18:00:03 crc kubenswrapper[4747]: I1202 18:00:03.788244 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2znb\" (UniqueName: \"kubernetes.io/projected/730231d5-3e51-4863-98a3-6b441542102b-kube-api-access-l2znb\") on node \"crc\" DevicePath \"\""
Dec 02 18:00:04 crc kubenswrapper[4747]: I1202 18:00:04.109841 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r" event={"ID":"730231d5-3e51-4863-98a3-6b441542102b","Type":"ContainerDied","Data":"92621ece66dd0c6bd92824a06ac54038919bca1a01fa1bb717d2c29d8b2b535e"}
Dec 02 18:00:04 crc kubenswrapper[4747]: I1202 18:00:04.109899 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92621ece66dd0c6bd92824a06ac54038919bca1a01fa1bb717d2c29d8b2b535e"
Dec 02 18:00:04 crc kubenswrapper[4747]: I1202 18:00:04.109959 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411640-mvj4r"
Dec 02 18:00:04 crc kubenswrapper[4747]: I1202 18:00:04.617197 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c"]
Dec 02 18:00:04 crc kubenswrapper[4747]: I1202 18:00:04.629829 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411595-mfh5c"]
Dec 02 18:00:05 crc kubenswrapper[4747]: I1202 18:00:05.775507 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff8708f7-868b-4b10-bef7-db00715aeaad" path="/var/lib/kubelet/pods/ff8708f7-868b-4b10-bef7-db00715aeaad/volumes"
Dec 02 18:00:08 crc kubenswrapper[4747]: I1202 18:00:08.779078 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:00:08 crc kubenswrapper[4747]: E1202 18:00:08.779810 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:00:09 crc kubenswrapper[4747]: I1202 18:00:09.594483 4747 scope.go:117] "RemoveContainer" containerID="74096b5680e10adf153ca6b4e8afce29a479a5d7f19f832d10383a58d8b8fe1d"
Dec 02 18:00:21 crc kubenswrapper[4747]: I1202 18:00:21.771856 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:00:21 crc kubenswrapper[4747]: E1202 18:00:21.777027 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:00:33 crc kubenswrapper[4747]: I1202 18:00:33.760729 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:00:33 crc kubenswrapper[4747]: E1202 18:00:33.761635 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:00:47 crc kubenswrapper[4747]: I1202 18:00:47.761001 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:00:47 crc kubenswrapper[4747]: E1202 18:00:47.762014 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.189573 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29411641-srw9z"]
Dec 02 18:01:00 crc kubenswrapper[4747]: E1202 18:01:00.190628 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="730231d5-3e51-4863-98a3-6b441542102b" containerName="collect-profiles"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.190646 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="730231d5-3e51-4863-98a3-6b441542102b" containerName="collect-profiles"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.190885 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="730231d5-3e51-4863-98a3-6b441542102b" containerName="collect-profiles"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.191657 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.215530 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29411641-srw9z"]
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.357168 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hftjl\" (UniqueName: \"kubernetes.io/projected/51620ca6-4919-4163-99bb-99fb21eb39a8-kube-api-access-hftjl\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.357239 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-fernet-keys\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.357335 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-combined-ca-bundle\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.357447 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-config-data\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.459017 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-combined-ca-bundle\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.459178 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-config-data\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.459344 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hftjl\" (UniqueName: \"kubernetes.io/projected/51620ca6-4919-4163-99bb-99fb21eb39a8-kube-api-access-hftjl\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.459387 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-fernet-keys\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.466741 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-config-data\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.472666 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-combined-ca-bundle\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.479116 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-fernet-keys\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.483781 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hftjl\" (UniqueName: \"kubernetes.io/projected/51620ca6-4919-4163-99bb-99fb21eb39a8-kube-api-access-hftjl\") pod \"keystone-cron-29411641-srw9z\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") " pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:00 crc kubenswrapper[4747]: I1202 18:01:00.535007 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:01 crc kubenswrapper[4747]: I1202 18:01:01.018643 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29411641-srw9z"]
Dec 02 18:01:01 crc kubenswrapper[4747]: W1202 18:01:01.031744 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51620ca6_4919_4163_99bb_99fb21eb39a8.slice/crio-cd8796ce04746026695f68b048055c4f4ff28950467c896bb2c943e080c0faa4 WatchSource:0}: Error finding container cd8796ce04746026695f68b048055c4f4ff28950467c896bb2c943e080c0faa4: Status 404 returned error can't find the container with id cd8796ce04746026695f68b048055c4f4ff28950467c896bb2c943e080c0faa4
Dec 02 18:01:01 crc kubenswrapper[4747]: I1202 18:01:01.761058 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:01:01 crc kubenswrapper[4747]: E1202 18:01:01.761841 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:01:01 crc kubenswrapper[4747]: I1202 18:01:01.866726 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411641-srw9z" event={"ID":"51620ca6-4919-4163-99bb-99fb21eb39a8","Type":"ContainerStarted","Data":"6b4aa0acfdbafdbace9bd33c621b97b8b902bb99ffa6d6d7c73ac08db6326f6a"}
Dec 02 18:01:01 crc kubenswrapper[4747]: I1202 18:01:01.866796 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411641-srw9z" event={"ID":"51620ca6-4919-4163-99bb-99fb21eb39a8","Type":"ContainerStarted","Data":"cd8796ce04746026695f68b048055c4f4ff28950467c896bb2c943e080c0faa4"}
Dec 02 18:01:01 crc kubenswrapper[4747]: I1202 18:01:01.895099 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29411641-srw9z" podStartSLOduration=1.895082544 podStartE2EDuration="1.895082544s" podCreationTimestamp="2025-12-02 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 18:01:01.889076923 +0000 UTC m=+4692.415965682" watchObservedRunningTime="2025-12-02 18:01:01.895082544 +0000 UTC m=+4692.421971303"
Dec 02 18:01:03 crc kubenswrapper[4747]: I1202 18:01:03.890587 4747 generic.go:334] "Generic (PLEG): container finished" podID="51620ca6-4919-4163-99bb-99fb21eb39a8" containerID="6b4aa0acfdbafdbace9bd33c621b97b8b902bb99ffa6d6d7c73ac08db6326f6a" exitCode=0
Dec 02 18:01:03 crc kubenswrapper[4747]: I1202 18:01:03.890696 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411641-srw9z" event={"ID":"51620ca6-4919-4163-99bb-99fb21eb39a8","Type":"ContainerDied","Data":"6b4aa0acfdbafdbace9bd33c621b97b8b902bb99ffa6d6d7c73ac08db6326f6a"}
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.352055 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.376291 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-combined-ca-bundle\") pod \"51620ca6-4919-4163-99bb-99fb21eb39a8\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") "
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.438968 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51620ca6-4919-4163-99bb-99fb21eb39a8" (UID: "51620ca6-4919-4163-99bb-99fb21eb39a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.478847 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-config-data\") pod \"51620ca6-4919-4163-99bb-99fb21eb39a8\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") "
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.479068 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hftjl\" (UniqueName: \"kubernetes.io/projected/51620ca6-4919-4163-99bb-99fb21eb39a8-kube-api-access-hftjl\") pod \"51620ca6-4919-4163-99bb-99fb21eb39a8\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") "
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.479256 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-fernet-keys\") pod \"51620ca6-4919-4163-99bb-99fb21eb39a8\" (UID: \"51620ca6-4919-4163-99bb-99fb21eb39a8\") "
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.480029 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.483687 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51620ca6-4919-4163-99bb-99fb21eb39a8-kube-api-access-hftjl" (OuterVolumeSpecName: "kube-api-access-hftjl") pod "51620ca6-4919-4163-99bb-99fb21eb39a8" (UID: "51620ca6-4919-4163-99bb-99fb21eb39a8"). InnerVolumeSpecName "kube-api-access-hftjl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.486384 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "51620ca6-4919-4163-99bb-99fb21eb39a8" (UID: "51620ca6-4919-4163-99bb-99fb21eb39a8"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.548720 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-config-data" (OuterVolumeSpecName: "config-data") pod "51620ca6-4919-4163-99bb-99fb21eb39a8" (UID: "51620ca6-4919-4163-99bb-99fb21eb39a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.582352 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-config-data\") on node \"crc\" DevicePath \"\""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.582391 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hftjl\" (UniqueName: \"kubernetes.io/projected/51620ca6-4919-4163-99bb-99fb21eb39a8-kube-api-access-hftjl\") on node \"crc\" DevicePath \"\""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.582405 4747 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/51620ca6-4919-4163-99bb-99fb21eb39a8-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.921237 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411641-srw9z" event={"ID":"51620ca6-4919-4163-99bb-99fb21eb39a8","Type":"ContainerDied","Data":"cd8796ce04746026695f68b048055c4f4ff28950467c896bb2c943e080c0faa4"}
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.921704 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd8796ce04746026695f68b048055c4f4ff28950467c896bb2c943e080c0faa4"
Dec 02 18:01:05 crc kubenswrapper[4747]: I1202 18:01:05.921794 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411641-srw9z"
Dec 02 18:01:13 crc kubenswrapper[4747]: I1202 18:01:13.762109 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:01:13 crc kubenswrapper[4747]: E1202 18:01:13.764453 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:01:25 crc kubenswrapper[4747]: I1202 18:01:25.761037 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:01:25 crc kubenswrapper[4747]: E1202 18:01:25.762512 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
Dec 02 18:01:37 crc kubenswrapper[4747]: I1202 18:01:37.763479 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77"
Dec 02 18:01:37 crc kubenswrapper[4747]: E1202 18:01:37.764829 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724"
podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" Dec 02 18:01:48 crc kubenswrapper[4747]: I1202 18:01:48.760891 4747 scope.go:117] "RemoveContainer" containerID="7e850452b95de834d0a47abf31a79b376a929b7b07ba2f40f13b2e0e4c6a7e77" Dec 02 18:01:48 crc kubenswrapper[4747]: E1202 18:01:48.762078 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-m5zcc_openshift-machine-config-operator(444e1e59-7bc7-44cd-bb37-ed903442b724)\"" pod="openshift-machine-config-operator/machine-config-daemon-m5zcc" podUID="444e1e59-7bc7-44cd-bb37-ed903442b724" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113624632024450 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113624633017366 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113613002016475 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113613002015445 5ustar corecore